diff --git a/.config/dotnet-tools.json b/.config/dotnet-tools.json
index 78d06c501e8835..fa97153cf17731 100644
--- a/.config/dotnet-tools.json
+++ b/.config/dotnet-tools.json
@@ -15,7 +15,7 @@
]
},
"microsoft.dotnet.xharness.cli": {
- "version": "9.0.0-prerelease.24208.1",
+ "version": "9.0.0-prerelease.24229.1",
"commands": [
"xharness"
]
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 00000000000000..806f7fad67b045
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,12 @@
+version: 2
+updates:
+ - package-ecosystem: "github-actions"
+ directory: "/"
+ schedule:
+ interval: daily
+ open-pull-requests-limit: 5
+ labels:
+ - area-codeflow
+ ignore:
+ - dependency-name: "actions/checkout"
+ update-types: ["version-update:semver-patch","version-update:semver-minor"]
diff --git a/.github/workflows/aspnetcore-sync.yml b/.github/workflows/aspnetcore-sync.yml
index 01aa3dfdabca60..582f371cf9ab49 100644
--- a/.github/workflows/aspnetcore-sync.yml
+++ b/.github/workflows/aspnetcore-sync.yml
@@ -16,14 +16,14 @@ jobs:
runs-on: windows-latest
steps:
- name: Checkout aspnetcore
- uses: actions/checkout@v2.0.0
+ uses: actions/checkout@v4
with:
# Test this script using changes in a fork
repository: 'dotnet/aspnetcore'
path: aspnetcore
ref: main
- name: Checkout runtime
- uses: actions/checkout@v2.0.0
+ uses: actions/checkout@v4
with:
# Test this script using changes in a fork
repository: 'dotnet/runtime'
@@ -42,7 +42,7 @@ jobs:
mkdir ..\artifacts
git status > ..\artifacts\status.txt
git diff > ..\artifacts\diff.txt
- - uses: actions/upload-artifact@v1
+ - uses: actions/upload-artifact@v4
with:
name: results
path: artifacts
@@ -57,7 +57,7 @@ jobs:
- name: Send PR
if: steps.check.outputs.changed == 'true'
# https://github.com/marketplace/actions/create-pull-request
- uses: dotnet/actions-create-pull-request@v3
+ uses: dotnet/actions-create-pull-request@v4
with:
token: ${{ secrets.GITHUB_TOKEN }}
path: .\runtime
diff --git a/.github/workflows/bump-chrome-version.yml b/.github/workflows/bump-chrome-version.yml
index fc2f09cf3d01f7..e1d1e89658ca02 100644
--- a/.github/workflows/bump-chrome-version.yml
+++ b/.github/workflows/bump-chrome-version.yml
@@ -16,7 +16,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Setup Branch
run: |
git config user.name github-actions[bot]
@@ -47,7 +47,7 @@ jobs:
- name: Create PR
if: steps.check_changes.outputs.has_changes == 'true'
- uses: actions/github-script@v6
+ uses: actions/github-script@v7
with:
script: |
const { CHROME_LINUX_VER, CHROME_WIN_VER } = process.env;
diff --git a/Directory.Build.props b/Directory.Build.props
index b1ac2559f9ed1e..c6bae2a825b1bf 100644
--- a/Directory.Build.props
+++ b/Directory.Build.props
@@ -56,18 +56,22 @@
- eng/native/configurecompiler.cmake
- eng/native/build-commons.sh
- src/native/libs/build-native.sh
- - src/coreclr/tools/aot/ILCompiler.Compiler/Compiler/DependencyAnalysis/ObjectWriter.cs
+ - src/coreclr/tools/aot/ILCompiler.Compiler/Compiler/ObjectWriter/MachObjectWriter.cs
- src/coreclr/nativeaot/BuildIntegration/Microsoft.NETCore.Native.Unix.targets
+ - src/mono/mono/tools/offsets-tool/offsets-tool.py
+ - src/mono/msbuild/apple/build/AppleBuild.targets
- src/installer/pkg/sfx/bundle/shared-framework-distribution-template-x64.xml
- src/installer/pkg/sfx/bundle/shared-framework-distribution-template-arm64.xml
+ - src/tasks/AotCompilerTask/MonoAOTCompiler.props
+ - src/tasks/AppleAppBuilder/Xcode.cs
- src/tasks/MobileBuildTasks/Apple/AppleProject.cs
- dotnet/installer repo > src/redist/targets/GeneratePKG.targets
-->
21
- 11.0
- 11.0
- 10.15
- 11.0
+ 12.2
+ 12.2
+ 12.0
+ 15.0
@@ -180,7 +184,9 @@
$([MSBuild]::NormalizePath('$(TestExclusionListTasksDir)', 'TestExclusionListTasks.dll'))
$([MSBuild]::NormalizeDirectory('$(ArtifactsBinDir)', 'coreclr', '$(TargetOS).$(TargetArchitecture).$(RuntimeConfiguration)'))
$(CoreCLRToolPath)
+
$([MSBuild]::NormalizeDirectory($(ArtifactsObjDir), 'wasmtime'))
true
$([MSBuild]::NormalizeDirectory($(WasmProjectRoot), 'build'))
@@ -397,12 +403,10 @@
true
true
- true
- true
- true
+ true
- false
+ false
@@ -413,7 +417,7 @@
'$(IsReferenceAssemblyProject)' != 'true' and
'$(IsGeneratorProject)' != 'true' and
'$(IsTestProject)' != 'true' and
- '$(IsPublishedAppTestProject)' != 'true' and
+ '$(IsTrimmingTestProject)' != 'true' and
'$(IsTestSupportProject)' != 'true' and
'$(UsingMicrosoftDotNetSharedFrameworkSdk)' != 'true' and
'$(MSBuildProjectExtension)' != '.pkgproj' and
@@ -464,7 +468,7 @@
-
+
true
diff --git a/docs/coding-guidelines/coding-style.md b/docs/coding-guidelines/coding-style.md
index d584de83130279..32dd1ec8bb65e5 100644
--- a/docs/coding-guidelines/coding-style.md
+++ b/docs/coding-guidelines/coding-style.md
@@ -36,7 +36,7 @@ The general rule we follow is "use Visual Studio defaults".
An [EditorConfig](https://editorconfig.org "EditorConfig homepage") file (`.editorconfig`) has been provided at the root of the runtime repository, enabling C# auto-formatting conforming to the above guidelines.
-We also use the [.NET Codeformatter Tool](https://github.com/dotnet/codeformatter) to ensure the code base maintains a consistent style over time, the tool automatically fixes the code base to conform to the guidelines outlined above.
+We also use the [dotnet-format tool](https://learn.microsoft.com/dotnet/core/tools/dotnet-format) to ensure the code base maintains a consistent style over time, the tool automatically fixes the code base to conform to the guidelines outlined above.
### Example File:
diff --git a/docs/design/datacontracts/contract-descriptor.md b/docs/design/datacontracts/contract-descriptor.md
index 1e3ddabd6dd735..fbd58eb33eb9a5 100644
--- a/docs/design/datacontracts/contract-descriptor.md
+++ b/docs/design/datacontracts/contract-descriptor.md
@@ -24,9 +24,9 @@ struct DotNetRuntimeContractDescriptor
uint32_t flags;
uint32_t descriptor_size;
const char *descriptor;
- uint32_t aux_data_count;
+ uint32_t pointer_data_count;
uint32_t pad0;
- uintptr_t *aux_data;
+ uintptr_t *pointer_data;
};
```
@@ -45,7 +45,7 @@ reserved bits should be written as zero. Diagnostic tooling may ignore non-zero
The `descriptor` is a pointer to a UTF-8 JSON string described in [data descriptor physical layout](./data_descriptor.md#Physical_JSON_descriptor). The total number of bytes is given by `descriptor_size`.
-The auxiliary data for the JSON descriptor is stored at the location `aux_data` in `aux_data_count` pointer-sized slots.
+The auxiliary data for the JSON descriptor is stored at the location `pointer_data` in `pointer_data_count` pointer-sized slots.
### Architecture properties
@@ -83,7 +83,7 @@ a JSON integer constant.
"globals":
{
"FEATURE_COMINTEROP": 0,
- "s_pThreadStore": [ 0 ] // indirect from aux data offset 0
+ "s_pThreadStore": [ 0 ] // indirect from pointer data offset 0
},
"contracts": {"Thread": 1, "GCHandle": 1, "ThreadStore": 1}
}
diff --git a/docs/design/datacontracts/data/empty.jsonc b/docs/design/datacontracts/data/empty.jsonc
new file mode 100644
index 00000000000000..29d15882a36b81
--- /dev/null
+++ b/docs/design/datacontracts/data/empty.jsonc
@@ -0,0 +1,4 @@
+// the empty baseline data descriptor
+{
+ "version": 0
+}
diff --git a/docs/design/mono/mono-manpage-1.md b/docs/design/mono/mono-manpage-1.md
new file mode 100644
index 00000000000000..8f5bd56abb3d8e
--- /dev/null
+++ b/docs/design/mono/mono-manpage-1.md
@@ -0,0 +1,2012 @@
+# Mono Manual Page
+
+Copyright 2003 Ximian, Inc. \
+Copyright 2004-2011 Novell, Inc. \
+Copyright 2011-2012 Xamarin Inc \
+Copyright 2013 7digital Media Ltd. \
+Copyright (c) .NET Foundation and Contributors
+
+Author: \
+Miguel de Icaza (miguel@gnu.org)
+
+## NAME
+
+mono - Mono's ECMA-CLI native code generator (Just-in-Time and
+Ahead-of-Time)
+
+## SYNOPSIS
+
+**mono \[options\] file \[arguments...\]**
+
+**mono-sgen \[options\] file \[arguments...\]**
+
+## DESCRIPTION
+
+*mono* is a runtime implementation of the ECMA Common Language
+Infrastructure. This can be used to run ECMA and .NET applications.
+
+The runtime loads the specified *file* and optionally passes the
+*arguments* to it. The *file* is an ECMA assembly. They typically have a
+.exe or .dll extension.
+
+These executables can reference additional functionality in the form of
+assembly references. By default those assembly references are resolved
+as follows: the **mscorlib.dll** is resolved from the system profile
+that is configured by Mono, and other assemblies are loaded from the
+Global Assembly Cache (GAC).
+
+The runtime contains a native code generator that transforms the Common
+Intermediate Language into native code.
+
+The code generator can operate in two modes: Just-in-time compilation
+(JIT) or Ahead-of-time compilation (AOT). Since code can be dynamically
+loaded, the runtime environment and the JIT are always present, even if
+code is compiled ahead of time.
+
+The runtime provides a number of configuration options for running
+applications, for developing and debugging, and for testing and
+debugging the runtime itself.
+
+The *mono* command uses the moving and generational SGen garbage
+collector while the *mono-boehm* command uses the conservative Boehm
+garbage collector.
+
+## PORTABILITY
+
+On Unix-based systems, Mono provides a mechanism to emulate the
+Windows-style file access, this includes providing a case insensitive
+view of the file system, directory separator mapping (from \\ to /) and
+stripping the drive letters.
+
+This functionality is enabled by setting the **MONO_IOMAP** environment
+variable to one of **all, drive** and **case.**
+
+See the description for **MONO_IOMAP** in the environment variables
+section for more details.
+
+## METHOD DESCRIPTIONS
+
+A number of diagnostic command line options take as argument a method
+description. A method description is a textual representation that can
+be used to uniquely identify a method. The syntax is as follows:
+
+ [W:][namespace]classname:methodname[(arguments)]
+
+The values in brackets are optional, like the namespace and the
+arguments. The arguments themselves are either empty, or a
+comma-separated list of arguments. Both the **classname** and
+**methodname** can be set to the special value '\*' to match any values
+(Unix shell users should escape the argument to avoid the shell
+interpreting this).
+
+The arguments, if present should be a comma separated list of types
+either a full typename, or for built-in types it should use the
+low-level ILAsm type names for the built-in types, like 'void', 'char',
+'bool', 'byte', 'sbyte', 'uint16', 'int16', 'uint',
+
+Pointer types should be the name of the type, followed by a '\*', arrays
+should be the typename followed by '\[' one or more commas (to indicate
+the rank of the array), and '\]'.
+
+Generic values should use '\<', one or more type names, separated by
+both a comma and a space and '\>'.
+
+By-reference arguments should include a "&" after the typename.
+
+If the method description is prefixed by 'W:' (or 'w:'), then it will
+match a *wrapper* method that may be created by the runtime for the
+specified method. (For example imported P/Invoke methods may have a
+wrapper generated by the runtime.)
+
+
+Examples:
+
+ *:ctor(int) // All constructors that take an int as an argument
+ *:Main // Methods named Main in any class
+ *:Main(string[]) // Methods named Main that take a string array in any class
+ W:UnixSignal:install // Wrappers for the UnixSignal.install DllImport
+
+## RUNTIME OPTIONS
+
+The following options are available:
+
+**--aot**, **--aot\[=options\]**
+This option is used to precompile the CIL code in the specified assembly
+to native code. The generated code is stored in a file with the
+extension .so. This file will be automatically picked up by the runtime
+when the assembly is executed.
+
+Ahead-of-Time compilation is most useful if you use it in combination
+with the -O=all,-shared flag which enables all of the optimizations in
+the code generator to be performed. Some of those optimizations are not
+practical for Just-in-Time compilation since they might be very time
+consuming.
+
+Unlike the .NET Framework, Ahead-of-Time compilation will not generate
+domain independent code: it generates the same code that the
+Just-in-Time compiler would produce. Since most applications use a
+single domain, this is fine. If you want to optimize the generated code
+for use in multi-domain applications, consider using the -O=shared flag.
+
+This pre-compiles the methods, but the original assembly is still
+required to execute as this one contains the metadata and exception
+information which is not available on the generated file. When
+precompiling code, you might want to compile with all optimizations
+(-O=all). Pre-compiled code is position independent code.
+
+Precompilation is just a mechanism to reduce startup time, increase code
+sharing across multiple mono processes and avoid just-in-time
+compilation program startup costs. The original assembly must still be
+present, as the metadata is contained there.
+
+AOT code typically can not be moved from one computer to another
+(CPU-specific optimizations that are detected at runtime) so you should
+not try to move the pre-generated assemblies or package the
+pre-generated assemblies for deployment.
+
+A few options are available as a parameter to the **--aot** command line
+option. The options are separated by commas, and more than one can be
+specified:
+
+> *asmonly*
+> Instructs the AOT compiler to output assembly code instead of an
+> object file.
+>
+> *bind-to-runtime-version*
+>
+> If specified, forces the generated AOT files to be bound to the
+> runtime version of the compiling Mono. This will prevent the AOT files
+> from being consumed by a different Mono runtime.
+>
+> *data-outfile=FILE.dll.aotdata*
+>
+> This instructs the AOT code generator to output certain data
+> constructs into a separate file. This can reduce the executable images
+> some five to twenty percent. Developers need to then ship the
+> resulting aotdata as a resource and register a hook to load the data
+> on demand by using the *mono_install_load_aot_data_hook* method.
+>
+> *direct-icalls*
+>
+> When this option is specified, icalls (internal calls made from the
+> standard library into the mono runtime code) are invoked directly
+> instead of going through the operating system symbol lookup operation.
+> This requires use of the *static* option.
+>
+> *direct-pinvoke*
+>
+> When this option is specified, P/Invoke methods are invoked directly
+> instead of going through the operating system symbol lookup operation.
+> This requires use of the *static* option.
+>
+> *dwarfdebug*
+> Instructs the AOT compiler to emit DWARF debugging information. When
+> used together with the nodebug option, only DWARF debugging
+> information is emitted, but not the information that can be used at
+> runtime.
+>
+> *full*
+>
+> This creates binaries which can be used with the --full-aot option.
+>
+> *hybrid*
+>
+> This creates binaries which can be used with the --hybrid-aot option.
+>
+> *llvm*
+> AOT will be performed with the LLVM backend instead of the Mono
+> backend where possible. This will be slower to compile but most likely
+> result in a performance improvement.
+>
+> *llvmonly*
+> AOT will be performed with the LLVM backend exclusively and the Mono
+> backend will not be used. The only output in this mode will be the
+> bitcode file normally specified with the *llvm-outfile* option. Use of
+> *llvmonly* automatically enables the *full* and *llvm* options. This
+> feature is experimental.
+>
+> *llvmopts=\[options\]*
+> Use this option to add more flags to the built-in set of flags passed
+> to the LLVM optimizer. When you invoke the *mono* command with the
+> *--aot=llvm* it displays the current list of flags that are being
+> passed to the *opt* command. *The list of possible flags that can be
+> passed can be* obtained by calling the bundled *opt* program that
+> comes with Mono, and calling it like this:
+>
+>
+>
+>
+> opt --help
+>
+> *llvmllc=\[options\]*
+> Use this option to add more flags to the built-in set of flags passed
+> to the LLVM static compiler (llc). The list of possible flags that can
+> be passed can be obtained by calling the bundled *llc* program that
+> comes with Mono, and calling it like this:
+>
+>
+>
+>
+> llc --help
+>
+> *mcpu=\[native or generic\]*
+> cpu=native allows AOT mode to use all instructions current CPU
+> supports, e.g. AVX2, SSE42, etc. Default value is 'generic'.
+> *mattr=\[cpu feature\]* Allows AOT code generator to use specified CPU
+> features where possible including \`System.Runtime.Intrinsics.\*'.
+> E.g. \`mattr=+avx2,mattr=-lzcnt' unlocks sse1-4.2, avx1-2 and disables
+> lzcnt. It's useful for cross-compilation or when it's not possible to
+> use \`-mcpu=native' (which enables all cpu feature current cpu has).
+> *llvm-outfile=\[filename\]* Gives the path for the temporary LLVM
+> bitcode file created during AOT. *dedup* Each AOT module will
+> typically contain the code for inflated methods and wrappers that are
+> called by code in that module. In dedup mode, we identify and skip
+> compiling all of those methods. When using this mode with fullaot,
+> dedup-include is required or these methods will remain missing.
+>
+> *dedup-include=\[filename\]*
+> In dedup-include mode, we are in the pass of compilation where we
+> compile the methods that we had previously skipped. All of them are
+> emitted into the assembly that is passed as this option. We
+> consolidate the many duplicate skipped copies of the same method into
+> one.
+>
+>
+>
+> *info*
+> Print the architecture the AOT in this copy of Mono targets and quit.
+>
+> *interp*
+> Generates all required wrappers, so that it is possible to run
+> --interpreter without any code generation at runtime. This option only
+> makes sense with **mscorlib.dll**. Embedders can set
+>
+> *depfile=\[filename\]*
+> Outputs a gcc -M style dependency file.
+>
+> mono_jit_set_aot_mode (MONO_AOT_MODE_INTERP);
+>
+>
+>
+> *ld-flags*
+> Additional flags to pass to the C linker (if the current AOT mode
+> calls for invoking it).
+>
+> *llvm-path=\*
+> Same for the llvm tools 'opt' and 'llc'.
+>
+> *msym-dir=\*
+> Instructs the AOT compiler to generate offline sequence points .msym
+> files. The generated .msym files will be stored into a subfolder of
+> \ named as the compilation AOTID.
+>
+> *mtriple=\*
+> Use the GNU style target triple \ to determine some code
+> generation options, i.e. --mtriple=armv7-linux-gnueabi will generate
+> code that targets ARMv7. This is currently only supported by the ARM
+> backend. In LLVM mode, this triple is passed on to the LLVM llc
+> compiler.
+>
+> *nimt-trampolines=\[number\]*
+> When compiling in full aot mode, the IMT trampolines must be
+> precreated in the AOT image. You can add additional method trampolines
+> with this argument. Defaults to 512.
+>
+> *ngsharedvt-trampolines=\[number\]*
+> When compiling in full aot mode, the value type generic sharing
+> trampolines must be precreated in the AOT image. You can add
+> additional method trampolines with this argument. Defaults to 512.
+>
+> *nodebug*
+> Instructs the AOT compiler to not output any debugging information.
+>
+> *no-direct-calls*
+> This prevents the AOT compiler from generating a direct calls to a
+> method. The AOT compiler usually generates direct calls for certain
+> methods that do not require going through the PLT (for example,
+> methods that are known to not require a hook like a static
+> constructor) or call into simple internal calls.
+>
+> *nrgctx-trampolines=\[number\]*
+> When compiling in full aot mode, the generic sharing trampolines must
+> be precreated in the AOT image. You can add additional method
+> trampolines with this argument. Defaults to 4096.
+>
+> *nrgctx-fetch-trampolines=\[number\]*
+> When compiling in full aot mode, the generic sharing fetch trampolines
+> must be precreated in the AOT image. You can add additional method
+> trampolines with this argument. Defaults to 128.
+>
+> *ntrampolines=\[number\]*
+> When compiling in full aot mode, the method trampolines must be
+> precreated in the AOT image. You can add additional method trampolines
+> with this argument. Defaults to 4096.
+>
+> *outfile=\[filename\]*
+> Instructs the AOT compiler to save the output to the specified file.
+>
+> *print-skipped-methods*
+> If the AOT compiler cannot compile a method for any reason, enabling
+> this flag will output the skipped methods to the console.
+>
+> *profile=\[file\]*
+> Specify a file to use for profile-guided optimization. See the **AOT
+> profiler** sub-section. To specify multiple files, include the
+> *profile* option multiple times.
+>
+> *profile-only*
+> AOT \*only\* the methods described in the files specified with the
+> *profile* option. See the **AOT profiler** sub-section.
+>
+> *readonly-value=namespace.typename.fieldname=type/value*
+> Override the value of a static readonly field. Usually, during JIT
+> compilation, the static constructor is run eagerly, so the value of a
+> static readonly field is known at compilation time and the compiler
+> can do a number of optimizations based on it. During AOT, instead, the
+> static constructor can't be run, so this option can be used to set the
+> value of such a field and enable the same set of optimizations. Type
+> can be any of i1, i2, i4 for integers of the respective sizes (in
+> bytes). Note that signed/unsigned numbers do not matter here, just the
+> storage size. This option can be specified multiple times and it
+> doesn't prevent the static constructor for the type defining the field
+> to execute with the usual rules at runtime (hence possibly computing a
+> different value for the field).
+>
+> *save-temps,keep-temps*
+> Instructs the AOT compiler to keep temporary files.
+>
+> *soft-debug*
+> This instructs the compiler to generate sequence point checks that
+> allow Mono's soft debugger to debug applications even on systems where
+> it is not possible to set breakpoints or to single step (certain
+> hardware configurations like the cell phones and video gaming
+> consoles).
+>
+> *static*
+> Create an ELF object file (.o) or .s file which can be statically
+> linked into an executable when embedding the mono runtime. When this
+> option is used, the object file needs to be registered with the
+> embedded runtime using the mono_aot_register_module function which
+> takes as its argument the mono_aot_module\_\\_info
+> global symbol from the object file:
+>
+> extern void *mono_aot_module_hello_info;
+>
+> mono_aot_register_module (mono_aot_module_hello_info);
+>
+>
+>
+> *stats*
+> Print various stats collected during AOT compilation.
+>
+> *temp-path=\[path\]*
+> Explicitly specify path to store temporary files created during AOT
+> compilation.
+>
+> *threads=\[number\]*
+> This is an experimental option for the AOT compiler to use multiple
+> threads when compiling the methods.
+>
+> *tool-prefix=\*
+> Prepends \ to the name of tools run by the AOT compiler, i.e.
+> 'as'/'ld'. For example, --tool-prefix=arm-linux-gnueabi- will make the
+> AOT compiler run
+>
+>
+>
+> *ld-name=NAME*
+> One of the tools used for AOT builds is the linker. Its name differs
+> between various systems and it may happen that the assumed default
+> name of the binary is not present. If the toolchain used does not have
+> a linker with the default name (e.g. Android NDK r22 does not have the
+> default 'ld' linker prefixed with 'tool-prefix' above, instead it has
+> prefixed 'ld.gold' and 'ld.bfd' linkers) this option can be used to
+> set the linker binary name. It will be prefixed with 'tool-prefix' to
+> form the full linker executable name.
+>
+> *verbose*
+> Prints additional information about type loading failures.
+>
+> *write-symbols,no-write-symbols*
+> Instructs the AOT compiler to emit (or not emit) debug symbol
+> information.
+>
+> *no-opt*
+> Instructs the AOT compiler to not call opt when compiling with LLVM.
+>
+> For more information about AOT, see:
+> http://www.mono-project.com/docs/advanced/aot/
+
+**--aot-path=PATH**
+List of additional directories to search for AOT images.
+
+**--apply-bindings=FILE**
+Apply the assembly bindings from the specified configuration file when
+running the AOT compiler. This is useful when compiling an auxiliary
+assembly that is referenced by a main assembly that provides a
+configuration file. For example, if app.exe uses lib.dll then in order
+to make the assembly bindings from app.exe.config available when
+compiling lib.dll ahead of time, use:
+
+
+
+ mono --apply-bindings=app.exe.config --aot lib.dll
+
+**--assembly-loader=MODE**
+If mode is **strict**, Mono will check that the public key token,
+culture and version of a candidate assembly matches the requested strong
+name. If mode is **legacy**, as long as the name matches, the candidate
+will be allowed. **strict** is the behavior consistent with .NET
+Framework but may break some existing mono-based applications. The
+default is **legacy**.
+
+**--attach=\[options\]**
+Currently the only option supported by this command line argument is
+**disable** which disables the attach functionality.
+
+**--config filename**
+Load the specified configuration file instead of the default one(s). The
+default files are /etc/mono/config and ~/.mono/config or the file
+specified in the MONO_CONFIG environment variable, if set. See the
+mono-config(5) man page for details on the format of this file.
+
+**--debugger-agent=\[options\]**
+This instructs the Mono runtime to start a debugging agent inside the
+Mono runtime and connect it to a client user interface that will control the
+Mono process. This option is typically used by IDEs, like the
+MonoDevelop or Visual Studio IDEs.
+
+The configuration is specified using one or more of the following
+options:
+
+> *address=host:port*
+>
+> Use this option to specify the IP address where your debugger client
+> is listening to.
+>
+> *loglevel=LEVEL*
+>
+> Specifies the diagnostics log level for
+>
+> *logfile=filename*
+>
+> Used to specify the file where the log will be stored, it defaults to
+> standard output.
+>
+> *server=\[y/n\]*
+> Defaults to no, with the default option Mono will actively connect to
+> the host/port configured with the **address** option. If you set it to
+> 'y', it instructs the Mono runtime to start debugging in server mode,
+> where Mono actively waits for the debugger front end to connect to the
+> Mono process. Mono will print out to stdout the IP address and port
+> where it is listening.
+>
+> *setpgid=\[y/n\]*
+> If set to yes, Mono will call **setpgid(0, 0)** on startup, if that
+> function is available on the system. This is useful for ensuring that
+> signals delivered to a process that is executing the debuggee are not
+> propagated to the debuggee, e.g. when Ctrl-C sends **SIGINT** to the
+> **sdb** tool.
+>
+> *suspend=\[y/n\]*
+> Defaults to yes, with the default option Mono will suspend the vm on
+> startup until it connects successfully to a debugger front end. If you
+> set it to 'n', in conjunction with **server=y**, it instructs the Mono
+> runtime to run as normal, while caching metadata to send to the
+> debugger front end on connection.
+>
+> *transport=transport_name*
+>
+> This is used to specify the transport that the debugger will use to
+> communicate. It must be specified and currently requires this to be
+> 'dt_socket'.
+
+**--desktop**
+Configures the virtual machine to be better suited for desktop
+applications. Currently this sets the GC system to avoid expanding the
+heap as much as possible at the expense of slowing down garbage
+collection a bit.
+
+**--full-aot**
+This flag instructs the Mono runtime to not generate any code at runtime
+and depend exclusively on the code generated from using mono --aot=full
+previously. This is useful for platforms that do not permit dynamic code
+generation, or if you need to run assemblies that have been stripped of
+IL (for example using mono-cil-strip).
+
+Notice that this feature will abort execution at runtime if a codepath
+in your program, or Mono's class libraries attempts to generate code
+dynamically. You should test your software upfront and make sure that
+you do not use any dynamic features.
+
+**--full-aot-interp**
+Same as --full-aot with fallback to the interpreter.
+
+**--gc=boehm**, **--gc=sgen**
+Selects the Garbage Collector engine for Mono to use, Boehm or SGen.
+Currently this merely ensures that you are running either the *mono* or
+*mono-sgen* commands. This flag can be set in the **MONO_ENV_OPTIONS**
+environment variable to force all of your child processes to use one
+particular kind of GC with the Mono runtime.
+
+**--gc-debug=\[options\]**
+Command line equivalent of the **MONO_GC_DEBUG** environment variable.
+
+**--gc-params=\[options\]**
+Command line equivalent of the **MONO_GC_PARAMS** environment variable.
+
+**--arch=32**, **--arch=64**
+(Mac OS X only): Selects the bitness of the Mono binary used, if
+available. If the binary used is already for the selected bitness,
+nothing changes. If not, the execution switches to a binary with the
+selected bitness suffix installed side by side (for example, '/bin/mono
+--arch=64' will switch to '/bin/mono64' iff '/bin/mono' is a 32-bit
+build).
+
+**--help**, **-h**
+Displays usage instructions.
+
+**--interpreter**
+The Mono runtime will use its interpreter to execute a given assembly.
+The interpreter is usually slower than the JIT, but it can be useful on
+platforms where code generation at runtime is not allowed.
+
+**--hybrid-aot**
+This flag allows the Mono runtime to run assemblies that have been
+stripped of IL, for example using mono-cil-strip. For this to work, the
+assembly must have been AOT compiled with --aot=hybrid.
+
+This flag is similar to --full-aot, but it does not disable the JIT.
+This means you can use dynamic features such as System.Reflection.Emit.
+
+**--llvm**
+If the Mono runtime has been compiled with LLVM support (not available
+in all configurations), Mono will use the LLVM optimization and code
+generation engine to JIT or AOT compile.
+
+For more information, consult:
+http://www.mono-project.com/docs/advanced/mono-llvm/
+
+**--ffast-math**
+This flag allows Mono and LLVM to apply aggressive floating point
+optimizations. Can break IEEE754 compliance.
+
+**--nollvm**
+When using a Mono that has been compiled with LLVM support, it forces
+Mono to fallback to its JIT engine and not use the LLVM backend.
+
+**--optimize=MODE**, **-O=MODE**
+MODE is a comma separated list of optimizations. They also allow
+optimizations to be turned off by prefixing the optimization name with a
+minus sign.
+
+In general, Mono has been tuned to use the default set of flags, before
+using these flags for a deployment setting, you might want to actually
+measure the benefits of using them.
+
+The following optimization flags are implemented in the core engine:
+
+ abcrem Array bound checks removal
+ all Turn on all optimizations
+ aot Usage of Ahead Of Time compiled code
+ branch Branch optimizations
+ cfold Constant folding
+ cmov Conditional moves [arch-dependency]
+ deadce Dead code elimination
+ consprop Constant propagation
+ copyprop Copy propagation
+ fcmov Fast x86 FP compares [arch-dependency]
+ float32 Perform 32-bit float arithmetic using 32-bit operations
+ gshared Enable generic code sharing.
+ inline Inline method calls
+ intrins Intrinsic method implementations
+ linears Linear scan global reg allocation
+ leaf Leaf procedures optimizations
+ loop Loop related optimizations
+ peephole Peephole postpass
+ precomp Precompile all methods before executing Main
+ sched Instruction scheduling
+ shared Emit per-domain code
+ sse2 SSE2 instructions on x86 [arch-dependency]
+ tailc Tail recursion and tail calls
+
+For example, to enable all the optimizations except dead code
+elimination and inlining, you can use:
+
+ -O=all,-deadce,-inline
+
+The flags that are flagged with \[arch-dependency\] indicate that the
+given option if used in combination with Ahead of Time compilation
+(--aot flag) would produce pre-compiled code that will depend on the
+current CPU and might not be safely moved to another computer.
+
+> The following optimizations are supported
+>
+> *float32*
+> Requests that the runtime perform 32-bit floating point operations
+> using only 32-bits. By default the Mono runtime tries to use the
+> highest precision available for floating point operations, but while
+> this might render better results, the code might run slower. This
+> options also affects the code generated by the LLVM backend.
+>
+> *inline*
+> Controls whether the runtime should attempt to inline (the default),
+> or not inline method invocations
+
+**--response=FILE** Provides a response file, this instructs the Mono
+command to read other command line options from the specified file, as
+if the options had been specified on the command line. Useful when you
+have very long command lines.
+
+**--runtime=VERSION**
+Mono supports different runtime versions. The version used depends on
+the program that is being run or on its configuration file (named
+program.exe.config). This option can be used to override such
+autodetection, by forcing a different runtime version to be used. Note
+that this should only be used to select a later compatible runtime
+version than the one the program was compiled against. A typical usage
+is for running a 1.1 program on a 2.0 version:
+
+ mono --runtime=v2.0.50727 program.exe
+
+**--server**
+Configures the virtual machine to be better suited for server operations
+(currently, allows a heavier threadpool initialization).
+
+**--use-map-jit**
+Instructs Mono to generate code using MAP_JIT on MacOS. Necessary for
+bundled applications.
+
+**--verify-all**
+Verifies mscorlib and assemblies in the global assembly cache for valid
+IL, and all user code for IL verifiability.
+
+This is different from **--security**'s verifiable or validil in that
+these options only check user code and skip mscorlib and assemblies
+located on the global assembly cache.
+
+**-V**, **--version**
+Prints JIT version information (system configuration, release number and
+branch names if available).
+
+**--version=number**
+Print version number only.
+
+## DEVELOPMENT OPTIONS
+
+The following options are used to help when developing a JITed
+application.
+
+**--debug**, **--debug=OPTIONS**
+Turns on the debugging mode in the runtime. If an assembly was compiled
+with debugging information, it will produce line number information for
+stack traces.
+
+The optional OPTIONS argument is a comma separated list of debugging
+options. These options are turned off by default since they generate
+much larger and slower code at runtime.
+
+The following options are supported:
+*casts*
+Produces a detailed error when throwing a InvalidCastException. This
+option needs to be enabled as this generates more verbose code at
+execution time.
+
+*mdb-optimizations*
+Disable some JIT optimizations which are usually only disabled when
+running inside the debugger. This can be helpful if you want to attach
+to the running process with mdb.
+
+*gdb*
+Generate and register debugging information with gdb. This is only
+supported on some platforms, and only when using gdb 7.0 or later.
+
+**--profile**\[=*profiler*\[:*profiler_args*\]\]
+Loads a profiler module with the given arguments. For more information,
+see the **PROFILING** section.
+
+This option can be used multiple times; each time will load an
+additional profiler module.
+
+**--trace\[=expression\]**
+Shows method names as they are invoked. By default all methods are
+traced.
+
+The trace can be customized to include or exclude methods, classes or
+assemblies. A trace expression is a comma separated list of targets,
+each target can be prefixed with a minus sign to turn off a particular
+target. The words \`program', \`all' and \`disabled' have special
+meaning. \`program' refers to the main program being executed, and
+\`all' means all the method calls.
+
+The \`disabled' option is used to start up with tracing disabled. It can
+be enabled at a later point in time in the program by sending the
+SIGUSR2 signal to the runtime.
+
+Assemblies are specified by their name, for example, to trace all calls
+in the System assembly, use:
+
+
+ mono --trace=System app.exe
+
+Classes are specified with the T: prefix. For example, to trace all
+calls to the System.String class, use:
+
+
+ mono --trace=T:System.String app.exe
+
+And individual methods are referenced with the M: prefix, and the
+standard method notation:
+
+
+ mono --trace=M:System.Console:WriteLine app.exe
+
+Exceptions can also be traced, it will cause a stack trace to be printed
+every time an exception of the specified type is thrown. The exception
+type can be specified with or without the namespace, and to trace all
+exceptions, specify 'all' as the type name.
+
+
+ mono --trace=E:System.Exception app.exe
+
+As previously noted, various rules can be specified at once:
+
+
+ mono --trace=T:System.String,T:System.Random app.exe
+
+You can exclude pieces, the next example traces calls to System.String
+except for the System.String:Concat method.
+
+
+ mono --trace=T:System.String,-M:System.String:Concat
+
+You can trace managed to unmanaged transitions using the wrapper
+qualifier:
+
+
+ mono --trace=wrapper app.exe
+
+Finally, namespaces can be specified using the N: prefix:
+
+
+ mono --trace=N:System.Xml
+
+**--no-x86-stack-align**
+Don't align stack frames on the x86 architecture. By default, Mono
+aligns stack frames to 16 bytes on x86, so that local floating point and
+SIMD variables can be properly aligned. This option turns off the
+alignment, which usually saves one instruction per call, but might
+result in significantly lower floating point and SIMD performance.
+
+**--jitmap**
+Generate a JIT method map in a /tmp/perf-PID.map file. This file is then
+used, for example, by the perf tool included in recent Linux kernels.
+Each line in the file has:
+
+
+
+
+ HEXADDR HEXSIZE methodname
+
+Currently this option is only supported on Linux.
+
+## JIT MAINTAINER OPTIONS
+
+The maintainer options are only used by those developing the runtime
+itself, and not typically of interest to runtime users or developers.
+
+**--bisect=optimization:filename**
+This flag is used by the automatic optimization bug bisector. It takes
+an optimization flag and a filename of a file containing a list of full
+method names, one per line. When it compiles one of the methods in the
+file it will use the optimization given, in addition to the
+optimizations that are otherwise enabled. Note that if the optimization
+is enabled by default, you should disable it with \`-O\`, otherwise it
+will just apply to every method, whether it's in the file or not.
+
+**--break method**
+Inserts a breakpoint before the method whose name is \`method'
+(namespace.class:methodname). Use \`Main' as method name to insert a
+breakpoint on the application's main method. You can use it also with
+generics, for example "System.Collections.Generic.Queue\`1:Peek"
+
+**--breakonex**
+Inserts a breakpoint on exceptions. This allows you to debug your
+application with a native debugger when an exception is thrown.
+
+**--compile name**
+This compiles a method (namespace.name:methodname), this is used for
+testing the compiler performance or to examine the output of the code
+generator.
+
+**--compile-all**
+Compiles all the methods in an assembly. This is used to test the
+compiler performance or to examine the output of the code generator
+
+**--graph=TYPE METHOD**
+This generates a postscript file with a graph with the details about the
+specified method (namespace.name:methodname). This requires \`dot' and
+ghostview to be installed (it expects Ghostview to be called "gv").
+
+The following graphs are available:
+
+ cfg Control Flow Graph (CFG)
+ dtree Dominator Tree
+ code CFG showing code
+ ssa CFG showing code after SSA translation
+ optcode CFG showing code after IR optimizations
+
+Some graphs will only be available if certain optimizations are turned
+on.
+
+**--ncompile**
+Instruct the runtime on the number of times that the method specified by
+--compile (or all the methods if --compile-all is used) to be compiled.
+This is used for testing the code generator performance.
+
+**--stats=\[method\]**
+Displays information about the work done by the runtime during the
+execution of an application. If a method (namespace.name:methodname) is
+specified, it will display that information when the method is first run
+in addition to the end of program execution.
+
+**--wapi=hps\|semdel**
+Perform maintenance of the process shared data.
+
+semdel will delete the global semaphore.
+
+hps will list the currently used handles.
+
+**-v**, **--verbose**
+Increases the verbosity level, each time it is listed, increases the
+verbosity level to include more information (including, for example, a
+disassembly of the native code produced, code selector info etc.).
+
+## ATTACH SUPPORT
+
+The Mono runtime allows external processes to attach to a running
+process and load assemblies into the running program. To attach to the
+process, a special protocol is implemented in the Mono.Management
+assembly.
+
+With this support it is possible to load assemblies that have an entry
+point (they are created with -target:exe or -target:winexe) to be loaded
+and executed in the Mono process.
+
+The code is loaded into the root domain, and it starts execution on the
+special runtime attach thread. The attached program should create its
+own threads and return after invocation.
+
+This support allows for example debugging applications by having the
+csharp shell attach to running processes.
+
+## PROFILING
+
+The Mono runtime includes a profiler API that dynamically loaded
+profiler modules and embedders can use to collect performance-related
+data about an application. Profiler modules are loaded by passing the
+**--profile** command line argument to the Mono runtime.
+
+Mono ships with a few profiler modules, of which the **log** profiler is
+the most feature-rich. It is also the default profiler if the *profiler*
+argument is not given, or if **default** is given. It is possible to
+write your own profiler modules; see the **Custom profilers**
+sub-section.
+
+### Log profiler
+
+The log profiler can be used to collect a lot of information about a
+program running in the Mono runtime. This data can be used (both while
+the process is running and later) to do analyses of the program
+behavior, determine resource usage, performance issues or even look for
+particular execution patterns.
+
+This is accomplished by logging the events provided by the Mono runtime
+through the profiler API and periodically writing them to a file which
+can later be inspected with the **mprof-report**(1) tool.
+
+More information about how to use the log profiler is available on the
+**mono-profilers**(1) page, under the **LOG PROFILER** section, as well
+as the **mprof-report**(1) page.
+
+### Coverage profiler
+
+The code coverage profiler can instrument a program to help determine
+which classes, methods, code paths, etc are actually executed. This is
+most useful when running a test suite to determine whether the tests
+actually cover the code they're expected to.
+
+More information about how to use the coverage profiler is available on
+the **mono-profilers**(1) page, under the **COVERAGE** PROFILER section.
+
+### AOT profiler
+
+The AOT profiler can help improve startup performance by logging which
+generic instantiations are used by a program, which the AOT compiler can
+then use to compile those instantiations ahead of time so that they
+won't have to be JIT compiled at startup.
+
+More information about how to use the AOT profiler is available on the
+**mono-profilers**(1) page, under the **AOT PROFILER** section.
+
+### Custom profilers
+
+Custom profiler modules can be loaded in exactly the same way as the
+standard modules that ship with Mono. They can also access the same
+profiler API to gather all kinds of information about the code being
+executed.
+
+For example, to use a third-party profiler called **custom**, you would
+load it like this:
+
+ mono --profile=custom program.exe
+
+You could also pass arguments to it:
+
+ mono --profile=custom:arg1,arg2=arg3 program.exe
+
+In the above example, Mono will load the profiler from the shared
+library called *libmono-profiler-custom.so* (name varies based on
+platform, e.g., *libmono-profiler-custom.dylib* on OS X). This profiler
+module must be on your dynamic linker library path (**LD_LIBRARY_PATH**
+on most systems, **DYLD_LIBRARY_PATH** on OS X).
+
+For a sample of how to write your own custom profiler, look at the
+*samples/profiler/sample.c* file in the Mono source tree.
+
+## DEBUGGING AIDS
+
+To debug managed applications, you can use the **mdb** command, a
+command line debugger.
+
+It is possible to obtain a stack trace of all the active threads in Mono
+by sending the QUIT signal to Mono, you can do this from the command
+line, like this:
+
+
+ kill -QUIT pid
+
+Where pid is the Process ID of the Mono process you want to examine. The
+process will continue running afterwards, but its state is not
+guaranteed.
+
+**Important:** this is a last-resort mechanism for debugging
+applications and should not be used to monitor or probe a production
+application. The integrity of the runtime after sending this signal is
+not guaranteed and the application might crash or terminate at any given
+point afterwards.
+
+The **--debug=casts** option can be used to get more detailed
+information for Invalid Cast operations, it will provide information
+about the types involved.
+
+You can use the MONO_LOG_LEVEL and MONO_LOG_MASK environment variables
+to get verbose debugging output about the execution of your application
+within Mono.
+
+If the *MONO_LOG_LEVEL* environment variable is set, the logging level is
+changed to the set value. Possible values are "error", "critical",
+"warning", "message", "info", "debug". The default value is "error".
+Messages with a logging level greater than or equal to the log level
+will be printed to stdout/stderr.
+
+Use "info" to track the dynamic loading of assemblies.
+
+Use the *MONO_LOG_MASK* environment variable to limit the extent of the
+messages you get: If set, the log mask is changed to the set value.
+Possible values are "asm" (assembly loader), "type", "dll" (native
+library loader), "gc" (garbage collector), "cfg" (config file loader),
+"aot" (precompiler), "security" (e.g. Moonlight CoreCLR support),
+"threadpool" (thread pool generic), "io-selector" (async socket
+operations), "io-layer" (I/O layer - processes, files, sockets, events,
+semaphores, mutexes and handles), "io-layer-process", "io-layer-file",
+"io-layer-socket", "io-layer-event", "io-layer-semaphore",
+"io-layer-mutex", "io-layer-handle" and "all". The default value is
+"all". Changing the mask value allows you to display only messages for a
+certain component. You can use multiple masks by comma separating them.
+For example, to see config file messages and assembly loader messages,
+set your mask to "asm,cfg".
+
+The following is a common use to track down problems with P/Invoke:
+
+
+ $ MONO_LOG_LEVEL="debug" MONO_LOG_MASK="dll" mono glue.exe
+
+## DEBUGGING WITH LLDB
+
+If you are using LLDB, you can use the **mono.py** script to print some
+internal data structures with it. To use this, add this to your
+**\$HOME/.lldbinit** file:
+
+ command script import $PREFIX/lib/mono/lldb/mono.py
+
+Where \$PREFIX is the prefix value that you used when you configured
+Mono (typically /usr).
+
+Once this is done, then you can inspect some Mono Runtime data
+structures, for example:
+
+ (lldb) p method
+
+ (MonoMethod *) $0 = 0x05026ac0 [mscorlib]System.OutOfMemoryException:.ctor()
+
+## SERIALIZATION
+
+Mono's XML serialization engine by default will use a reflection-based
+approach to serialize which might be slow for continuous processing (web
+service applications). The serialization engine will determine when a
+class must use a hand-tuned serializer based on a few parameters and if
+needed it will produce a customized C# serializer for your types at
+runtime. This customized serializer then gets dynamically loaded into
+your application.
+
+You can control this with the MONO_XMLSERIALIZER_THS environment
+variable.
+
+The possible values are **\`no'** to disable the use of a C# customized
+serializer, or an integer that is the minimum number of uses before the
+runtime will produce a custom serializer (0 will produce a custom
+serializer on the first access, 50 will produce a serializer on the 50th
+use). Mono will fall back to an interpreted serializer if the serializer
+generation somehow fails. This behavior can be disabled by setting the
+option **\`nofallback'** (for example:
+MONO_XMLSERIALIZER_THS=0,nofallback).
+
+## ENVIRONMENT VARIABLES
+
+**GC_DONT_GC**
+Turns off the garbage collection in Mono. This should be only used for
+debugging purposes
+
+**HTTP_PROXY**
+(Also **http_proxy**) If set, web requests using the Mono Class Library
+will be automatically proxied through the given URL. Not supported on
+Windows, Mac OS, iOS or Android. See also **NO_PROXY**.
+
+**LLVM_COUNT**
+When Mono is compiled with LLVM support, this instructs the runtime to
+stop using LLVM after the specified number of methods are JITed. This is
+a tool used in diagnostics to help isolate problems in the code
+generation backend. For example **LLVM_COUNT=10** would only compile 10
+methods with LLVM and then switch to the Mono JIT engine.
+**LLVM_COUNT=0** would disable the LLVM engine altogether.
+
+**MONO_ASPNET_INHIBIT_SETTINGSMAP**
+Mono contains a feature which allows modifying settings in the .config
+files shipped with Mono by using config section mappers. The mappers and
+the mapping rules are defined in the \$prefix/etc/mono/2.0/settings.map
+file and, optionally, in the settings.map file found in the top-level
+directory of your ASP.NET application. Both files are read by System.Web
+on application startup, if they are found at the above locations. If you
+don't want the mapping to be performed you can set this variable in your
+environment before starting the application and no action will be taken.
+
+**MONO_ASPNET_WEBCONFIG_CACHESIZE**
+Mono has a cache of ConfigSection objects for speeding up
+WebConfigurationManager queries. Its default size is 100 items, and when
+more items are needed, cache evictions start happening. If evictions are
+too frequent this could impose unnecessary overhead, which could be
+avoided by using this environment variable to set up a higher cache size
+(or to lower memory requirements by decreasing it).
+
+**MONO_CAIRO_DEBUG_DISPOSE**
+If set, causes Mono.Cairo to collect stack traces when objects are
+allocated, so that the finalization/Dispose warnings include information
+about the instance's origin.
+
+**MONO_CFG_DIR**
+If set, this variable overrides the default system configuration
+directory (\$PREFIX/etc). It's used to locate machine.config file.
+
+**MONO_COM**
+Sets the style of COM interop. If the value of this variable is "MS"
+Mono will use string marshalling routines from the liboleaut32 for the
+BSTR type library, any other values will use the mono-builtin BSTR
+string marshalling.
+
+**MONO_CONFIG**
+If set, this variable overrides the default runtime configuration file
+(\$PREFIX/etc/mono/config). The --config command line options overrides
+the environment variable.
+
+**MONO_CPU_ARCH**
+Override the automatic cpu detection mechanism. Currently used only on
+arm. The format of the value is as follows:
+
+
+
+
+ "armvV [thumb[2]]"
+
+where V is the architecture number 4, 5, 6 or 7 and the options can
+currently be "thumb" or "thumb2". Example:
+
+
+ MONO_CPU_ARCH="armv4 thumb" mono ...
+
+**MONO_ARM_FORCE_SOFT_FLOAT**
+When Mono is built with a soft float fallback on ARM and this variable
+is set to "1", Mono will always emit soft float code, even if a VFP unit
+is detected.
+
+**MONO_DARWIN_USE_KQUEUE_FSW**
+Fall back on the kqueue FileSystemWatcher implementation in Darwin. The
+default is the FSEvent implementation.
+
+**MONO_DARWIN_WATCHER_MAXFDS**
+This is a debugging aid used to force limits on the kqueue
+FileSystemWatcher implementation in Darwin. There is no limit by
+default.
+
+**MONO_DISABLE_MANAGED_COLLATION**
+If this environment variable is \`yes', the runtime uses unmanaged
+collation (which actually means no culture-sensitive collation). It
+internally disables managed collation functionality invoked via the
+members of System.Globalization.CompareInfo class. Collation is enabled
+by default.
+
+**MONO_DISABLE_SHARED_AREA**
+Unix only: If set, disable usage of shared memory for exposing
+performance counters. This means it will not be possible to both
+externally read performance counters from this process or read those
+of external processes.
+
+**MONO_DNS**
+When set, enables the use of a fully managed DNS resolver instead of the
+regular libc functions. This resolver performs much better when multiple
+queries are run in parallel.
+
+Note that /etc/nsswitch.conf will be ignored.
+
+**MONO_EGD_SOCKET**
+For platforms that do not otherwise have a way of obtaining random bytes
+this can be set to the name of a file system socket on which an egd or
+prngd daemon is listening.
+
+**MONO_ENABLE_AIO**
+If set, tells mono to attempt using native asynchronous I/O services. If
+not set, a default select/poll implementation is used. Currently epoll
+and kqueue are supported.
+
+**MONO_THREADS_SUSPEND** Selects a mechanism that Mono will use to suspend
+threads. May be set to "preemptive", "coop", or "hybrid". Threads may
+need to be suspended by the debugger, or using some .NET threading APIs,
+and most commonly when the SGen garbage collector needs to stop all
+threads during a critical phase of garbage collection. Preemptive mode
+is the mode that Mono has used historically, going back to the Boehm
+days, where the garbage collector would run at any point and suspend
+execution of all threads as required to perform a garbage collection.
+The cooperative mode on the other hand requires the cooperation of all
+threads to stop at a safe point. This makes for an easier to debug
+garbage collector and it improves the stability of the runtime because
+threads are not suspended when accessing critical resources. In
+scenarios where Mono is embedded in another application, cooperative
+suspend requires the embedder code to follow coding guidelines in order
+to cooperate with the garbage collector. Cooperative suspend in embedded
+Mono is currently experimental. Hybrid mode is a combination of the two
+that retains better compatibility with scenarios where Mono is embedded
+in another application: threads that are running managed code or code
+that comprises the Mono runtime will be cooperatively suspended, while
+threads running embedder code will be preemptively suspended. Hybrid
+suspend is the default on some desktop platforms.
+
+Alternatively, coop and hybrid mode can be enabled at compile time by
+using the --enable-cooperative-suspend or --enable-hybrid-suspend flags,
+respectively, when calling configure. The **MONO_THREADS_SUSPEND**
+environment variable takes priority over the compiled default.
+
+**MONO_ENABLE_COOP_SUSPEND**
+This environment variable is obsolete, but retained for backward
+compatibility. Use **MONO_THREADS_SUSPEND** set to "coop" instead. Note
+that if configure flags were provided to enable cooperative or hybrid
+suspend, this variable is ignored.
+
+**MONO_ENV_OPTIONS**
+This environment variable allows you to pass command line arguments to a
+Mono process through the environment. This is useful for example to
+force all of your Mono processes to use LLVM or SGEN without having to
+modify any launch scripts.
+
+**MONO_SDB_ENV_OPTIONS**
+Used to pass extra options to the debugger agent in the runtime, as they
+were passed using --debugger-agent=.
+
+**MONO_EVENTLOG_TYPE**
+Sets the type of event log provider to use (for
+System.Diagnostics.EventLog).
+
+Possible values are:
+
+> *local\[:path\]*
+>
+> Persists event logs and entries to the local file system.
+>
+> The directory in which to persist the event logs, event sources and
+> entries can be specified as part of the value.
+>
+> If the path is not explicitly set, it defaults to
+> "/var/lib/mono/eventlog" on Unix and "%APPDATA%\\mono\\eventlog" on Windows.
+>
+> *win32*
+>
+> Uses the native win32 API to write events and registers event logs
+> and event sources in the registry. This is only available on
+> Windows.
+>
+> On Unix, the directory permission for individual event log and event
+> source directories is set to 777 (with +t bit) allowing everyone to
+> read and write event log entries while only allowing entries to be
+> deleted by the user(s) that created them.
+>
+> *null*
+>
+> Silently discards any events.
+>
+> The default is "null" on Unix (and versions of Windows before NT), and
+> "win32" on Windows NT (and higher).
+
+**MONO_EXTERNAL_ENCODINGS**
+If set, contains a colon-separated list of text encodings to try when
+turning externally-generated text (e.g. command-line arguments or
+filenames) into Unicode. The encoding names come from the list provided
+by iconv, and the special case "default_locale" which refers to the
+current locale's default encoding.
+
+When reading externally-generated text strings UTF-8 is tried first, and
+then this list is tried in order with the first successful conversion
+ending the search. When writing external text (e.g. new filenames or
+arguments to new processes) the first item in this list is used, or
+UTF-8 if the environment variable is not set.
+
+The problem with using MONO_EXTERNAL_ENCODINGS to process your files is
+that although it's possible to get the right file name, it is not
+necessarily possible to open the file. In general if
+you have problems with encodings in your filenames you should use the
+"convmv" program.
+
+**MONO_GC_PARAMS**
+When using Mono with the SGen garbage collector this variable controls
+several parameters of the collector. The variable's value is a comma
+separated list of words.
+
+**max-heap-size=***size*
+Sets the maximum size of the heap. The size is specified in bytes and
+must be a power of two. The suffixes \`k', \`m' and \`g' can be used to
+specify kilo-, mega- and gigabytes, respectively. The limit is the sum
+of the nursery, major heap and large object heap. Once the limit is
+reached the application will receive OutOfMemoryExceptions when trying
+to allocate. Note that the full extent of memory set in max-heap-size
+might not be available to satisfy a single allocation due to internal
+fragmentation. By default the heap limit is disabled and the GC will try
+to use all available memory.
+
+**nursery-size=***size*
+Sets the size of the nursery. The size is specified in bytes and must be
+a power of two. The suffixes \`k', \`m' and \`g' can be used to specify
+kilo-, mega- and gigabytes, respectively. The nursery is the first
+generation (of two). A larger nursery will usually speed up the program
+but will obviously use more memory. The default nursery size is 4 MB.
+
+**major=***collector*
+Specifies which major collector to use. Options are \`marksweep' for the
+Mark&Sweep collector, \`marksweep-conc' for concurrent Mark&Sweep and
+\`marksweep-conc-par' for parallel and concurrent Mark&Sweep. The
+concurrent Mark&Sweep collector is the default.
+
+**mode=balanced\|throughput\|pause**\[:*max-pause*\]
+Specifies what should be the garbage collector's target. The
+\`throughput' mode aims to reduce time spent in the garbage collector
+and improve application speed, the \`pause' mode aims to keep pause
+times to a minimum and it receives the argument *max-pause* which
+specifies the maximum pause time in milliseconds that is acceptable and
+the \`balanced' mode which is a general purpose optimal mode.
+
+**soft-heap-limit=***size*
+Once the heap size gets larger than this size, ignore what the default
+major collection trigger metric says and only allow four nursery sizes'
+worth of major heap growth between major collections.
+
+**evacuation-threshold=***threshold*
+Sets the evacuation threshold in percent. This option is only available
+on the Mark&Sweep major collectors. The value must be an integer in the
+range 0 to 100. The default is 66. If the sweep phase of the collection
+finds that the occupancy of a specific heap block type is less than this
+percentage, it will do a copying collection for that block type in the
+next major collection, thereby restoring occupancy to close to 100
+percent. A value of 0 turns evacuation off.
+
+**(no-)lazy-sweep**
+Enables or disables lazy sweep for the Mark&Sweep collector. If enabled,
+the sweeping of individual major heap blocks is done piecemeal whenever
+the need arises, typically during nursery collections. Lazy sweeping is
+enabled by default.
+
+**(no-)concurrent-sweep**
+Enables or disables concurrent sweep for the Mark&Sweep collector. If
+enabled, the iteration of all major blocks to determine which ones can
+be freed and which ones have to be kept and swept, is done concurrently
+with the running program. Concurrent sweeping is enabled by default.
+
+**stack-mark=***mark-mode*
+Specifies how application threads should be scanned. Options are
+\`precise\` and \`conservative\`. Precise marking allows the collector to
+know which values on the stack are references and which are not.
+Conservative marking treats all values as potential references and
+leaves them untouched. Precise marking reduces floating garbage and can
+speed up nursery collection and allocation rate, but it has the downside
+of requiring significant extra memory per compiled method. The right
+option, unfortunately, requires experimentation.
+
+**save-target-ratio=***ratio*
+Specifies the target save ratio for the major collector. The collector
+allows a given amount of memory to be promoted from the nursery due to
+minor collections before it triggers a major collection. This amount is
+based on how much memory it expects to free. It is represented as a
+ratio of the size of the heap after a major collection. Valid values are
+between 0.1 and 2.0. The default is 0.5. Smaller values will keep the
+major heap size smaller but will trigger more major collections.
+Likewise, bigger values will use more memory and result in less frequent
+major collections. This option is EXPERIMENTAL, so it might disappear in
+later versions of mono.
+
+**default-allowance-ratio=***ratio*
+Specifies the default allocation allowance when the calculated size is
+too small. The allocation allowance is how much memory the collector
+lets be promoted before triggering a major collection. It is a ratio of the
+nursery size. Valid values are between 1.0 and 10.0. The default is 4.0.
+Smaller values lead to smaller heaps and more frequent major
+collections. Likewise, bigger values will allow the heap to grow faster
+but use more memory when it reaches a stable size. This option is
+EXPERIMENTAL, so it might disappear in later versions of mono.
+
+**minor=***minor-collector*
+Specifies which minor collector to use. Options are \`simple' which
+promotes all objects from the nursery directly to the old generation,
+\`simple-par' which has same promotion behavior as \`simple' but using
+multiple workers and \`split' which lets objects stay longer on the
+nursery before promoting.
+
+**alloc-ratio=***ratio*
+Specifies the ratio of memory from the nursery to be used by the alloc
+space. This can only be used with the split minor collector. Valid
+values are integers between 1 and 100. Default is 60.
+
+**promotion-age=***age*
+Specifies the required age an object must reach inside the nursery
+before being promoted to the old generation. This can only be used
+with the split minor collector. Valid values are integers between 1
+and 14. Default is 2.
+
+**(no-)cementing**
+Enables or disables cementing. This can dramatically shorten nursery
+collection times on some benchmarks where pinned objects are referred to
+from the major heap.
+
+**allow-synchronous-major**
+This forbids the major collector from performing synchronous major
+collections. The major collector might want to do a synchronous
+collection due to excessive fragmentation. Disabling this might trigger
+OutOfMemory error in situations that would otherwise not happen.
+
+**MONO_GC_DEBUG**
+When using Mono with the SGen garbage collector this environment
+variable can be used to turn on various debugging features of the
+collector. The value of this variable is a comma separated list of
+words. Do not use these options in production.
+
+*number*
+Sets the debug level to the specified number.
+
+**print-allowance**
+After each major collection prints memory consumption for before and
+after the collection and the allowance for the minor collector, i.e. how
+much the heap is allowed to grow from minor collections before the next
+major collection is triggered.
+
+**print-pinning**
+Gathers statistics on the classes whose objects are pinned in the
+nursery and for which global remset entries are added. Prints those
+statistics when shutting down.
+
+**collect-before-allocs**
+**check-remset-consistency**
+This performs a remset consistency check at various opportunities, and
+also clears the nursery at collection time, instead of the default, when
+buffers are allocated (clear-at-gc). The consistency check ensures that
+there are no major to minor references that are not on the remembered
+sets.
+
+**mod-union-consistency-check**
+Checks that the mod-union cardtable is consistent before each finishing
+major collection pause. This check is only applicable to concurrent
+major collectors.
+
+**check-mark-bits**
+Checks that mark bits in the major heap are consistent at the end of
+each major collection. Consistent mark bits mean that if an object is
+marked, all objects that it had references to must also be marked.
+
+**check-nursery-untag**
+After garbage collections, check whether all vtable pointers are no
+longer tagged.
+
+**xdomain-checks**
+Performs a check to make sure that no references are left to an unloaded
+AppDomain.
+
+**clear-at-tlab-creation**
+Clears the nursery incrementally when the thread local allocation
+buffers (TLAB) are created. The default setting clears the whole nursery
+at GC time.
+
+**debug-clear-at-tlab-creation**
+Clears the nursery incrementally when the thread local allocation
+buffers (TLAB) are created, but at GC time fills it with the byte
+\`0xff\`, which should result in a crash more quickly if
+\`clear-at-tlab-creation\` doesn't work properly.
+
+**clear-at-gc**
+This clears the nursery at GC time instead of doing it when the thread
+local allocation buffer (TLAB) is created. The default is to clear the
+nursery at TLAB creation time.
+
+**disable-minor**
+Don't do minor collections. If the nursery is full, a major collection
+is triggered instead, unless it, too, is disabled.
+
+**disable-major**
+Don't do major collections.
+
+**conservative-stack-mark**
+Forces the GC to scan the stack conservatively, even if precise scanning
+is available.
+
+**no-managed-allocator**
+Disables the managed allocator.
+
+**managed-allocator**
+Enables the managed allocator.
+
+**check-scan-starts**
+If set, does a plausibility check on the scan_starts before and after
+each collection
+
+**verify-nursery-at-minor-gc**
+If set, does a complete object walk of the nursery at the start of each
+minor collection.
+
+**dump-nursery-at-minor-gc**
+If set, dumps the contents of the nursery at the start of each minor
+collection. Requires verify-nursery-at-minor-gc to be set.
+
+**heap-dump=***file*
+Dumps the heap contents to the specified file. To visualize the
+information, use the mono-heapviz tool.
+
+**binary-protocol=***file*
+Outputs the debugging output to the specified file. For this to work,
+Mono needs to be compiled with the BINARY_PROTOCOL define on sgen-gc.c.
+You can then use this command to explore the output
+
+ sgen-grep-binprot 0x1234 0x5678 < file
+
+**nursery-canaries**
+If set, objects allocated in the nursery are suffixed with a canary
+(guard) word, which is checked on each minor collection. Can be used to
+detect/debug heap corruption issues. This disables the usage of the
+managed allocator, because allocation from full aot code is inconsistent
+with this option. If the application is guaranteed not to use aot code,
+the managed allocator can be enabled back with managed-allocator option.
+
+**do-not-finalize(=***classes***)**
+If enabled, finalizers will not be run. Everything else will be
+unaffected: finalizable objects will still be put into the finalization
+queue where they survive until they're scheduled to finalize. Once
+they're not in the queue anymore they will be collected regularly. If a
+list of comma-separated class names is given, only objects from those
+classes will not be finalized.
+
+**log-finalizers**
+Log verbosely around the finalization process to aid debugging.
+
+**MONO_GAC_PREFIX**
+Provides a prefix the runtime uses to look for Global Assembly Caches.
+Directories are separated by the platform path separator (colons on
+unix). MONO_GAC_PREFIX should point to the top directory of a prefixed
+install. Or to the directory provided in the gacutil /gacdir command.
+Example: **/home/username/.mono:/usr/local/mono/**
+
+**MONO_IOMAP**
+(deprecated) Enabled some filename rewriting support to assist
+badly-written applications that hard-code Windows paths. It no longer
+works as of Mono 6.0.
+
+**MONO_LLVM**
+When Mono is using the LLVM code generation backend you can use this
+environment variable to pass code generation options to the LLVM
+compiler.
+
+**MONO_MANAGED_WATCHER**
+If set to "disabled", System.IO.FileSystemWatcher will use a file
+watcher implementation which silently ignores all the watching requests.
+If set to any other value, System.IO.FileSystemWatcher will use the
+default managed implementation (slow). If unset, mono will try to use
+inotify, FAM, Gamin, kevent under Unix systems and native API calls on
+Windows, falling back to the managed implementation on error.
+
+**MONO_MESSAGING_PROVIDER**
+Mono supports a plugin model for its implementation of System.Messaging
+making it possible to support a variety of messaging implementations
+(e.g. AMQP, ActiveMQ). To specify which messaging implementation is to
+be used the environment variable needs to be set to the full class name
+for the provider. E.g. to use the RabbitMQ based AMQP implementation the
+variable should be set to:
+
+ Mono.Messaging.RabbitMQ.RabbitMQMessagingProvider,Mono.Messaging.RabbitMQ
+ MONO_NO_SMP
+ If set causes the mono process to be bound to a single processor. This may be
+ useful when debugging or working around race conditions.
+ MONO_NO_TLS
+ Disable inlining of thread local accesses. Try setting this if you get a segfault
+ early on in the execution of mono.
+ MONO_PATH
+ Provides a search path to the runtime where to look for library
+ files. This is a tool convenient for debugging applications, but
+ should not be used by deployed applications as it breaks the assembly
+ loader in subtle ways.
+ Directories are separated by the platform path separator (colons on unix). Example:
+ /home/username/lib:/usr/local/mono/lib
+ Relative paths are resolved based on the launch-time current directory.
+ Alternative solutions to MONO_PATH include: installing libraries into
+ the Global Assembly Cache (see gacutil(1)) or having the dependent
+ libraries side-by-side with the main executable.
+ For a complete description of recommended practices for application
+ deployment, see
+ http://www.mono-project.com/docs/getting-started/application-deployment/
+ MONO_SHARED_DIR
+ If set, this is the directory where the ".wapi" handle state is stored.
+ This is the directory where the Windows I/O Emulation layer stores its
+ shared state data (files, events, mutexes, pipes). By default Mono
+ will store the ".wapi" directory in the users's home directory.
+ MONO_SHARED_HOSTNAME
+ Uses the string value of this variable as a replacement for the host name when
+ creating file names in the ".wapi" directory. This helps if the host name of
+ your machine is likely to be changed when a mono application is running or if
+ you have a .wapi directory shared among several different computers.
+ Mono typically uses the hostname to create the files that are used to
+ share state across multiple Mono processes. This is done to support
+ home directories that might be shared over the network.
+ MONO_STRICT_IO_EMULATION
+ If set, extra checks are made during IO operations. Currently, this
+ includes only advisory locks around file writes.
+ MONO_TLS_PROVIDER
+ This environment variable controls which TLS/SSL provider Mono will
+ use. The options are usually determined by the operating system where
+ Mono was compiled and the configuration options that were used for
+ it.
+ default
+ Uses the default TLS stack that the Mono runtime was configured with.
+ Usually this is configured to use Apple's SSL stack on Apple
+ platforms, and Boring SSL on other platforms.
+ apple
+ Forces the use of the Apple SSL stack, only works on Apple platforms.
+ btls
+ Forces the use of the BoringSSL stack. See
+ https://opensource.google.com/projects/boringssl for more information
+ about this stack.
+ legacy
+ This is the old Mono stack, which only supports SSL and TLS up to
+ version 1.0. It is deprecated and will be removed in the future.
+ MONO_TLS_SESSION_CACHE_TIMEOUT
+ The time, in seconds, that the SSL/TLS session cache will keep its entries to
+ avoid a new negotiation between the client and a server. Negotiations are very
+ CPU intensive so an application-specific custom value may prove useful for
+ small embedded systems.
+ The default is 180 seconds.
+ MONO_THREADS_PER_CPU
+ The minimum number of threads in the general threadpool will be
+ MONO_THREADS_PER_CPU * number of CPUs. The default value for this
+ variable is 1.
+ MONO_XMLSERIALIZER_THS
+ Controls the threshold for the XmlSerializer to produce a custom
+ serializer for a given class instead of using the Reflection-based
+ interpreter. The possible values are `no' to disable the use of a
+ custom serializer or a number to indicate when the XmlSerializer
+ should start serializing. The default value is 50, which means that
+ a custom serializer will be produced on the 50th use.
+ MONO_X509_REVOCATION_MODE
+ Sets the revocation mode used when validating a X509 certificate chain (https,
+ ftps, smtps...). The default is 'nocheck', which performs no revocation check
+ at all. The other possible values are 'offline', which performs CRL check (not
+ implemented yet) and 'online' which uses OCSP and CRL to verify the revocation
+ status (not implemented yet).
+ NO_PROXY
+ (Also no_proxy) If both HTTP_PROXY and NO_PROXY are
+ set, NO_PROXY will be treated as a comma-separated list of "bypass" domains
+ which will not be sent through the proxy. Domains in NO_PROXY may contain
+ wildcards, as in "*.mono-project.com" or "build????.local". Not supported on
+ Windows, Mac OS, iOS or Android.
+
+## ENVIRONMENT VARIABLES FOR DEBUGGING
+
+**MONO_ASPNET_NODELETE**
+If set to any value, temporary source files generated by ASP.NET support
+classes will not be removed. They will be kept in the user's temporary
+directory.
+
+**MONO_DEBUG**
+If set, enables some features of the runtime useful for debugging. This
+variable should contain a comma separated list of debugging options.
+Currently, the following options are supported:
+
+**align-small-structs**
+Enables small structs alignment to 4/8 bytes.
+
+**arm-use-fallback-tls**
+When this option is set on ARM, a fallback thread local store will be
+used instead of the default fast thread local storage primitives.
+
+**break-on-unverified**
+If this variable is set, when the Mono VM runs into a verification
+problem, instead of throwing an exception it will break into the
+debugger. This is useful when debugging verifier problems
+
+**casts**
+This option can be used to get more detailed information from
+InvalidCast exceptions, it will provide information about the types
+involved.
+
+**check-pinvoke-callconv**
+This option causes the runtime to check for calling convention
+mismatches when using pinvoke, i.e. mixing cdecl/stdcall. It only works
+on windows. If a mismatch is detected, an ExecutionEngineException is
+thrown.
+
+**collect-pagefault-stats**
+Collects information about pagefaults. This is used internally to track
+the number of page faults produced to load metadata. To display this
+information you must use this option with "--stats" command line option.
+
+**debug-domain-unload**
+When this option is set, the runtime will invalidate the domain memory
+pool instead of destroying it.
+
+**disable_omit_fp**
+Disables a compiler optimization where the frame pointer is omitted from
+the stack. This optimization can interact badly with debuggers.
+
+**dont-free-domains**
+This is an optimization for multi-AppDomain applications (most commonly
+ASP.NET applications). Due to internal limitations, Mono by default
+does not use typed allocations on multi-appDomain applications as they
+could leak memory when a domain is unloaded.
+
+Although this is a fine default, for applications that use more than one
+AppDomain heavily (for example, ASP.NET applications) it is worth
+trading off the small leaks for the increased performance (additionally,
+since ASP.NET applications are not likely going to unload the
+application domains on production systems, it is worth using this
+feature).
+
+**dyn-runtime-invoke**
+Instructs the runtime to try to use a generic runtime-invoke wrapper
+instead of creating one invoke wrapper.
+
+**explicit-null-checks**
+Makes the JIT generate an explicit NULL check on variable dereferences
+instead of depending on the operating system to raise a SIGSEGV or
+another form of trap event when an invalid memory location is accessed.
+
+**gdb**
+Equivalent to setting the **MONO_XDEBUG** variable, this emits symbols
+into a shared library as the code is JITed that can be loaded into GDB
+to inspect symbols.
+
+**gen-seq-points**
+Automatically generates sequence points where the IL stack is empty.
+These are places where the debugger can set a breakpoint.
+
+**llvm-disable-implicit-null-checks**
+Makes the LLVM backend use explicit NULL checks on variable dereferences
+instead of depending on operating system support for signals or traps
+when an invalid memory location is accessed. Unconditionally enabled by
+explicit-null-checks.
+
+**no-compact-seq-points**
+Unless the option is used, the runtime generates sequence points data
+that maps native offsets to IL offsets. Sequence point data is used to
+display IL offset in stacktraces. Stacktraces with IL offsets can be
+symbolicated using mono-symbolicate tool.
+
+**handle-sigint**
+Captures the interrupt signal (Control-C) and displays a stack trace
+when pressed. Useful to find out where the program is executing at a
+given point. This only displays the stack trace of a single thread.
+
+**init-stacks**
+Instructs the runtime to initialize the stack with some known values
+(0x2a on x86-64) at the start of a method to assist in debugging the JIT
+engine.
+
+**keep-delegates**
+This option will leak delegate trampolines that are no longer referenced
+as to present the user with more information about a delegate misuse.
+Basically a delegate instance might be created, passed to unmanaged
+code, and no references kept in managed code, which will garbage collect
+the code. With this option it is possible to track down the source of
+the problems.
+
+**no-gdb-backtrace**
+This option will disable the GDB backtrace emitted by the runtime after
+a SIGSEGV or SIGABRT in unmanaged code.
+
+**partial-sharing**
+When this option is set, the runtime can share generated code between
+generic types effectively reducing the amount of code generated.
+
+**reverse-pinvoke-exceptions**
+This option will cause mono to abort with a descriptive message when
+during stack unwinding after an exception it reaches a native stack
+frame. This happens when a managed delegate is passed to native code,
+and the managed delegate throws an exception. Mono will normally try to
+unwind the stack to the first (managed) exception handler, and it will
+skip any native stack frames in the process. This leads to undefined
+behaviour (since mono doesn't know how to process native frames), leaks,
+and possibly crashes too.
+
+**single-imm-size**
+This guarantees that each time managed code is compiled the same
+instructions and registers are used, regardless of the size of used
+values.
+
+**soft-breakpoints**
+This option allows using single-steps and breakpoints in hardware where
+we cannot do it with signals.
+
+**suspend-on-native-crash**
+This option will suspend the program when a native crash occurs
+(SIGSEGV, SIGILL, ...). This is useful for debugging crashes which do
+not happen under gdb, since a live process contains more information
+than a core file.
+
+**suspend-on-sigsegv**
+Same as **suspend-on-native-crash**.
+
+**suspend-on-exception**
+This option will suspend the program when an exception occurs.
+
+**suspend-on-unhandled**
+This option will suspend the program when an unhandled exception occurs.
+
+**thread-dump-dir=DIR**
+Use DIR for storing thread dumps created by SIGQUIT.
+
+**weak-memory-model**
+Don't enforce the CLR memory model on platforms with weak memory models.
+This can introduce random crashes in some rare cases, for multithreaded
+environments. This can be used for a performance boost on applications
+that are single threaded.
+
+**verbose-gdb**
+Make gdb output on native crashes more verbose.
+
+**MONO_LOG_LEVEL**
+The logging level, possible values are \`error', \`critical',
+\`warning', \`message', \`info' and \`debug'. See the DEBUGGING section
+for more details.
+
+**MONO_LOG_MASK**
+Controls the domain of the Mono runtime that logging will apply to. If
+set, the log mask is changed to the set value. Possible values are "asm"
+(assembly loader), "type", "dll" (native library loader), "gc" (garbage
+collector), "cfg" (config file loader), "aot" (precompiler), "security"
+(e.g. Moonlight CoreCLR support) and "all". The default value is "all".
+Changing the mask value allows you to display only messages for a
+certain component. You can use multiple masks by comma separating them.
+For example, to see config file messages and assembly loader messages,
+set your mask to "asm,cfg".
+
+**MONO_LOG_DEST**
+Controls where trace log messages are written. If not set then the
+messages go to stdout. If set, the string either specifies a path to a
+file that will have messages appended to it, or the string "syslog" in
+which case the messages will be written to the system log. Under
+Windows, this is simulated by writing to a file called "mono.log".
+**MONO_LOG_HEADER** Controls whether trace log messages not directed to
+syslog have the id, timestamp, and pid as the prefix to the log message.
+To enable a header, this environment variable just needs to be non-null.
+
+**MONO_TRACE**
+Used for runtime tracing of method calls. The format of the comma
+separated trace options is:
+
+
+
+
+ [-]M:method name
+ [-]N:namespace
+ [-]T:class name
+ [-]all
+ [-]program
+ disabled Trace output off upon start.
+
+You can toggle trace output on/off sending a SIGUSR2 signal to the
+program.
+
+**MONO_TRACE_LISTENER**
+If set, enables the System.Diagnostics.DefaultTraceListener, which will
+print the output of the System.Diagnostics Trace and Debug classes. It
+can be set to a filename, and to Console.Out or Console.Error to display
+output to standard output or standard error, respectively. If it's set
+to Console.Out or Console.Error you can append an optional prefix that
+will be used when writing messages like this:
+Console.Error:MyProgramName. See the
+System.Diagnostics.DefaultTraceListener documentation for more
+information.
+
+**MONO_WCF_TRACE**
+This eases WCF diagnostics functionality by simply outputting all log
+messages from the WCF engine to "stdout", "stderr" or any file passed to
+this environment variable. The log format is the same as usual
+diagnostic output.
+
+**MONO_XEXCEPTIONS**
+This throws an exception when a X11 error is encountered; by default a
+message is displayed but execution continues
+
+**MONO_XMLSERIALIZER_DEBUG**
+Set this value to 1 to prevent the serializer from removing the
+temporary files that are created for fast serialization; This might be
+useful when debugging.
+
+**MONO_XSYNC**
+This is used in the System.Windows.Forms implementation when running
+with the X11 backend. This is used to debug problems in Windows.Forms as
+it forces all of the commands sent to the X11 server to be done
+synchronously. The default mode of operation is asynchronous which makes
+it hard to isolate the root of certain problems.
+
+**MONO_XDEBUG**
+When the MONO_XDEBUG env var is set, debugging info for JITted code
+is emitted into a shared library, loadable into gdb. This enables, for
+example, to see managed frame names on gdb backtraces.
+
+**MONO_VERBOSE_METHOD**
+Enables the maximum JIT verbosity for the specified method. This is very
+helpful to diagnose miscompilation problems of a specific method.
+This can be a semicolon-separated list of method names to match. If the
+name is simple, this applies to any method with that name, otherwise you
+can use a mono method description (see the section METHOD DESCRIPTIONS).
+
+**MONO_JIT_DUMP_METHOD**
+Enables sending of the JITs intermediate representation for a specified
+method to the IdealGraphVisualizer tool.
+
+**MONO_VERBOSE_HWCAP**
+If set, makes the JIT output information about detected CPU features
+(such as SSE, CMOV, FCMOV, etc) to stdout.
+
+**MONO_CONSERVATIVE_HWCAP**
+If set, the JIT will not perform any hardware capability detection. This
+may be useful to pinpoint the cause of JIT issues. This is the default
+when Mono is built as an AOT cross compiler, so that the generated code
+will run on most hardware.
+
+**MONO_PROFILE**
+Equivalent to **--profile** argument.
+
+## VALGRIND
+
+If you want to use Valgrind, you will find the file \`mono.supp' useful,
+it contains the suppressions for the GC which trigger incorrect
+warnings. Use it like this:
+
+ valgrind --suppressions=mono.supp mono ...
+
+## DTRACE
+
+On some platforms, Mono can expose a set of DTrace probes (also known as
+user-land statically defined, USDT Probes).
+
+They are defined in the file \`mono.d'.
+**ves-init-begin, ves-init-end**
+
+Begin and end of runtime initialization.
+
+**method-compile-begin, method-compile-end**
+
+Begin and end of method compilation. The probe arguments are class name,
+method name and signature, and in case of method-compile-end success or
+failure of compilation.
+
+**gc-begin, gc-end**
+
+Begin and end of Garbage Collection.
+
+To verify the availability of the probes, run:
+ dtrace -P mono'$target' -l -c mono
+
+## PERMISSIONS
+
+Mono's Ping implementation for detecting network reachability can create
+the ICMP packets itself without requiring the system ping command to do
+the work. If you want to enable this on Linux for non-root users, you
+need to give the Mono binary special permissions.
+
+As root, run this command:
+
+ # setcap cap_net_raw=+ep /usr/bin/mono
+
+## FILES
+
+On Unix assemblies are loaded from the installation lib directory. If
+you set \`prefix' to /usr, the assemblies will be located in /usr/lib.
+On Windows, the assemblies are loaded from the directory where mono and
+mint live.
+
+**~/.mono/aot-cache**
+
+The directory for the ahead-of-time compiler demand creation assemblies
+are located.
+
+**/etc/mono/config, ~/.mono/config**
+
+Mono runtime configuration file. See the mono-config(5) manual page for
+more information.
+
+**~/.config/.mono/certs, /usr/share/.mono/certs**
+
+Contains Mono certificate stores for users / machine. See the certmgr(1)
+manual page for more information on managing certificate stores and the
+mozroots(1) page for information on how to import the Mozilla root
+certificates into the Mono certificate store.
+
+**~/.mono/assemblies/ASSEMBLY/ASSEMBLY.config**
+
+Files in this directory allow a user to customize the configuration for
+a given system assembly, the format is the one described in the
+mono-config(5) page.
+
+**~/.config/.mono/keypairs, /usr/share/.mono/keypairs**
+
+Contains Mono cryptographic keypairs for users / machine. They can be
+accessed by using a CspParameters object with DSACryptoServiceProvider
+and RSACryptoServiceProvider classes.
+
+**~/.config/.isolatedstorage, ~/.local/share/.isolatedstorage, /usr/share/.isolatedstorage**
+
+Contains Mono isolated storage for non-roaming users, roaming users and
+local machine. Isolated storage can be accessed using the classes from
+the System.IO.IsolatedStorage namespace.
+
+**\.config**
+
+Configuration information for individual assemblies is loaded by the
+runtime from side-by-side files with the .config files, see the
+http://www.mono-project.com/Config for more information.
+
+**Web.config, web.config**
+
+ASP.NET applications are configured through these files, the
+configuration is done on a per-directory basis. For more information on
+this subject see the http://www.mono-project.com/Config_system.web page.
+
+## MAILING LISTS
+
+Mailing lists are listed at the
+http://www.mono-project.com/community/help/mailing-lists/
+
+## WEB SITE
+
+http://www.mono-project.com
+
+## SEE ALSO
+
+**certmgr**(1), **cert-sync**(1), **csharp**(1), **gacutil**(1),
+**mcs**(1), **monodis**(1), **mono-config**(5), **mono-profilers**(1),
+**mprof-report**(1), **pdb2mdb**(1), **xsp**(1), **mod_mono**(8)
+
+For more information on AOT:
+http://www.mono-project.com/docs/advanced/aot/
+
+For ASP.NET-related documentation, see the xsp(1) manual page
diff --git a/docs/workflow/building/coreclr/linux-instructions.md b/docs/workflow/building/coreclr/linux-instructions.md
index 8e7457ff521850..3acec5e95ad9a9 100644
--- a/docs/workflow/building/coreclr/linux-instructions.md
+++ b/docs/workflow/building/coreclr/linux-instructions.md
@@ -59,14 +59,15 @@ docker pull mcr.microsoft.com/dotnet-buildtools/prereqs:cbl-mariner-2.0-cross-ar
All official builds are cross-builds with a rootfs for the target OS, and will use the clang version available on the container.
| Host OS | Target OS | Target Arch | Image location | crossrootfs location |
-| --------------------- | ------------ | --------------- | -------------------------------------------------------------------------------- | -------------------- |
-| CBL-mariner 2.0 (x64) | Alpine 3.13 | x64 | `mcr.microsoft.com/dotnet-buildtools/prereqs:cbl-mariner-2.0-cross-amd64-alpine` | `/crossrootfs/x64` |
-| CBL-mariner 2.0 (x64) | Ubuntu 16.04 | x64 | `mcr.microsoft.com/dotnet-buildtools/prereqs:cbl-mariner-2.0-cross-amd64` | `/crossrootfs/x64` |
-| CBL-mariner 2.0 (x64) | Alpine | arm32 (armhf) | `mcr.microsoft.com/dotnet-buildtools/prereqs:cbl-mariner-2.0-cross-arm-alpine` | `/crossrootfs/arm` |
-| CBL-mariner 2.0 (x64) | Ubuntu 16.04 | arm32 (armhf) | `mcr.microsoft.com/dotnet-buildtools/prereqs:cbl-mariner-2.0-cross-arm` | `/crossrootfs/arm` |
-| CBL-mariner 2.0 (x64) | Alpine | arm64 (arm64v8) | `mcr.microsoft.com/dotnet-buildtools/prereqs:cbl-mariner-2.0-cross-arm64-alpine` | `/crossrootfs/arm64` |
-| CBL-mariner 2.0 (x64) | Ubuntu 16.04 | arm64 (arm64v8) | `mcr.microsoft.com/dotnet-buildtools/prereqs:cbl-mariner-2.0-cross-arm64` | `/crossrootfs/arm64` |
-| Ubuntu 18.04 (x64) | FreeBSD | x64 | `mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-18.04-cross-freebsd-12` | `/crossrootfs/x64` |
+| --------------------- | ------------ | --------------- | -------------------------------------------------------------------------------------- | -------------------- |
+| Azure Linux (x64) | Alpine 3.13 | x64 | `mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-cross-amd64-alpine-net9.0` | `/crossrootfs/x64` |
+| Azure Linux (x64) | Ubuntu 16.04 | x64 | `mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-cross-amd64-net9.0` | `/crossrootfs/x64` |
+| Azure Linux (x64) | Alpine | arm32 (armhf) | `mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-cross-arm-alpine-net9.0` | `/crossrootfs/arm` |
+| Azure Linux (x64) | Ubuntu 16.04 | arm32 (armhf) | `mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-cross-arm-net9.0` | `/crossrootfs/arm` |
+| Azure Linux (x64) | Alpine | arm64 (arm64v8) | `mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-cross-arm64-alpine-net9.0` | `/crossrootfs/arm64` |
+| Azure Linux (x64) | Ubuntu 16.04 | arm64 (arm64v8) | `mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-cross-arm64-net9.0` | `/crossrootfs/arm64` |
+| Azure Linux (x64) | Ubuntu 16.04 | x86 | `mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-cross-x86-net9.0` | `/crossrootfs/x86` |
+| CBL-mariner 2.0 (x64) | FreeBSD 13 | x64 | `mcr.microsoft.com/dotnet-buildtools/prereqs:cbl-mariner-2.0-cross-amd64-freebsd-13` | `/crossrootfs/x64` |
These Docker images are built using the Dockerfiles maintained in the [dotnet-buildtools-prereqs-docker repo](https://github.com/dotnet/dotnet-buildtools-prereqs-docker).
diff --git a/eng/DotNetBuild.props b/eng/DotNetBuild.props
index 52abdf92f094a2..9d68b8265b0c1f 100644
--- a/eng/DotNetBuild.props
+++ b/eng/DotNetBuild.props
@@ -84,6 +84,7 @@
$(InnerBuildArgs) /p:SourceBuiltNonShippingPackagesDir=$(SourceBuiltNonShippingPackagesDir)
$(InnerBuildArgs) /p:SourceBuiltAssetManifestsDir=$(SourceBuiltAssetManifestsDir)
$(InnerBuildArgs) /p:SourceBuiltSymbolsDir=$(SourceBuiltSymbolsDir)
+ $(InnerBuildArgs) /p:GitHubRepositoryName=$(GitHubRepositoryName)
diff --git a/eng/Subsets.props b/eng/Subsets.props
index 499e9c3cb645fe..0a51b12649d1e8 100644
--- a/eng/Subsets.props
+++ b/eng/Subsets.props
@@ -173,6 +173,8 @@
+
+
@@ -358,6 +360,8 @@
+
+
@@ -369,6 +373,10 @@
Test="true" Category="clr" Condition="'$(DotNetBuildSourceOnly)' != 'true' and '$(NativeAotSupported)' == 'true'"/>
+
+
+
+
diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml
index 81d16576e39003..1f961bfe2544fc 100644
--- a/eng/Version.Details.xml
+++ b/eng/Version.Details.xml
@@ -1,8 +1,8 @@
-
+
https://github.com/dotnet/icu
- 0ea0175965771285846b5d077bebe5946036a595
+ 3230636546e2b2dc9648c4385c201e28a550f0ea
https://github.com/dotnet/msquic
@@ -12,9 +12,9 @@
https://github.com/dotnet/wcf
7f504aabb1988e9a093c1e74d8040bd52feb2f01
-
+
https://github.com/dotnet/emsdk
- 19c9523f5c2dd091b49959700723af795d6ad2b4
+ 53288f87c588907e8ff01f129786820fe998573c
https://github.com/dotnet/llvm-project
@@ -58,24 +58,24 @@
a045dd54a4c44723c215d992288160eb1401bb7f
-
+
https://github.com/dotnet/cecil
- 861f49c137941b9722a43e5993ccac7716c8528c
+ 7a4a59f9f66baf6711a6ce2de01d3b2c62ed72d8
-
+
https://github.com/dotnet/cecil
- 861f49c137941b9722a43e5993ccac7716c8528c
+ 7a4a59f9f66baf6711a6ce2de01d3b2c62ed72d8
-
+
https://github.com/dotnet/emsdk
- 19c9523f5c2dd091b49959700723af795d6ad2b4
+ 53288f87c588907e8ff01f129786820fe998573c
-
+
https://github.com/dotnet/emsdk
- 19c9523f5c2dd091b49959700723af795d6ad2b4
+ 53288f87c588907e8ff01f129786820fe998573c
@@ -85,146 +85,146 @@
-
+
https://github.com/dotnet/source-build-externals
- 5a273649709de76f61957e3d69e1f031e5ac82e2
+ b02769661c9a51985877819e8bdebfbcbee65710
-
+
https://github.com/dotnet/arcade
- 8ec8057ac5073b6b2e3fcb0a33d588d2a3357ad3
+ 020255bcf7d0b8beed7de05338d97396982ae527
-
+
https://github.com/dotnet/arcade
- 8ec8057ac5073b6b2e3fcb0a33d588d2a3357ad3
+ 020255bcf7d0b8beed7de05338d97396982ae527
-
+
https://github.com/dotnet/arcade
- 8ec8057ac5073b6b2e3fcb0a33d588d2a3357ad3
+ 020255bcf7d0b8beed7de05338d97396982ae527
-
+
https://github.com/dotnet/arcade
- 8ec8057ac5073b6b2e3fcb0a33d588d2a3357ad3
+ 020255bcf7d0b8beed7de05338d97396982ae527
-
+
https://github.com/dotnet/arcade
- 8ec8057ac5073b6b2e3fcb0a33d588d2a3357ad3
+ 020255bcf7d0b8beed7de05338d97396982ae527
-
+
https://github.com/dotnet/arcade
- 8ec8057ac5073b6b2e3fcb0a33d588d2a3357ad3
+ 020255bcf7d0b8beed7de05338d97396982ae527
-
+
https://github.com/dotnet/arcade
- 8ec8057ac5073b6b2e3fcb0a33d588d2a3357ad3
+ 020255bcf7d0b8beed7de05338d97396982ae527
-
+
https://github.com/dotnet/arcade
- 8ec8057ac5073b6b2e3fcb0a33d588d2a3357ad3
+ 020255bcf7d0b8beed7de05338d97396982ae527
-
+
https://github.com/dotnet/arcade
- 8ec8057ac5073b6b2e3fcb0a33d588d2a3357ad3
+ 020255bcf7d0b8beed7de05338d97396982ae527
-
+
https://github.com/dotnet/arcade
- 8ec8057ac5073b6b2e3fcb0a33d588d2a3357ad3
+ 020255bcf7d0b8beed7de05338d97396982ae527
-
+
https://github.com/dotnet/arcade
- 8ec8057ac5073b6b2e3fcb0a33d588d2a3357ad3
+ 020255bcf7d0b8beed7de05338d97396982ae527
-
+
https://github.com/dotnet/arcade
- 8ec8057ac5073b6b2e3fcb0a33d588d2a3357ad3
+ 020255bcf7d0b8beed7de05338d97396982ae527
-
+
https://github.com/dotnet/arcade
- 8ec8057ac5073b6b2e3fcb0a33d588d2a3357ad3
+ 020255bcf7d0b8beed7de05338d97396982ae527
-
+
https://github.com/dotnet/arcade
- 8ec8057ac5073b6b2e3fcb0a33d588d2a3357ad3
+ 020255bcf7d0b8beed7de05338d97396982ae527
-
+
https://github.com/dotnet/arcade
- 8ec8057ac5073b6b2e3fcb0a33d588d2a3357ad3
+ 020255bcf7d0b8beed7de05338d97396982ae527
-
+
https://github.com/dotnet/arcade
- 8ec8057ac5073b6b2e3fcb0a33d588d2a3357ad3
+ 020255bcf7d0b8beed7de05338d97396982ae527
-
+
https://github.com/dotnet/arcade
- 8ec8057ac5073b6b2e3fcb0a33d588d2a3357ad3
+ 020255bcf7d0b8beed7de05338d97396982ae527
-
+
https://github.com/dotnet/arcade
- 8ec8057ac5073b6b2e3fcb0a33d588d2a3357ad3
+ 020255bcf7d0b8beed7de05338d97396982ae527
-
+
https://github.com/dotnet/arcade
- 8ec8057ac5073b6b2e3fcb0a33d588d2a3357ad3
+ 020255bcf7d0b8beed7de05338d97396982ae527
-
+
https://github.com/dotnet/arcade
- 8ec8057ac5073b6b2e3fcb0a33d588d2a3357ad3
+ 020255bcf7d0b8beed7de05338d97396982ae527
-
+
https://github.com/dotnet/runtime-assets
- 30b6a8d9d3af5681e4caef1ea453619a4b0e9f2e
+ 422b5e7d39642735eacc85b2a060abb3899ce497
-
+
https://github.com/dotnet/runtime-assets
- 30b6a8d9d3af5681e4caef1ea453619a4b0e9f2e
+ 422b5e7d39642735eacc85b2a060abb3899ce497
-
+
https://github.com/dotnet/runtime-assets
- 30b6a8d9d3af5681e4caef1ea453619a4b0e9f2e
+ 422b5e7d39642735eacc85b2a060abb3899ce497
-
+
https://github.com/dotnet/runtime-assets
- 30b6a8d9d3af5681e4caef1ea453619a4b0e9f2e
+ 422b5e7d39642735eacc85b2a060abb3899ce497
-
+
https://github.com/dotnet/runtime-assets
- 30b6a8d9d3af5681e4caef1ea453619a4b0e9f2e
+ 422b5e7d39642735eacc85b2a060abb3899ce497
-
+
https://github.com/dotnet/runtime-assets
- 30b6a8d9d3af5681e4caef1ea453619a4b0e9f2e
+ 422b5e7d39642735eacc85b2a060abb3899ce497
-
+
https://github.com/dotnet/runtime-assets
- 30b6a8d9d3af5681e4caef1ea453619a4b0e9f2e
+ 422b5e7d39642735eacc85b2a060abb3899ce497
-
+
https://github.com/dotnet/runtime-assets
- 30b6a8d9d3af5681e4caef1ea453619a4b0e9f2e
+ 422b5e7d39642735eacc85b2a060abb3899ce497
-
+
https://github.com/dotnet/runtime-assets
- 30b6a8d9d3af5681e4caef1ea453619a4b0e9f2e
+ 422b5e7d39642735eacc85b2a060abb3899ce497
-
+
https://github.com/dotnet/runtime-assets
- 30b6a8d9d3af5681e4caef1ea453619a4b0e9f2e
+ 422b5e7d39642735eacc85b2a060abb3899ce497
-
+
https://github.com/dotnet/runtime-assets
- 30b6a8d9d3af5681e4caef1ea453619a4b0e9f2e
+ 422b5e7d39642735eacc85b2a060abb3899ce497
-
+
https://github.com/dotnet/runtime-assets
- 30b6a8d9d3af5681e4caef1ea453619a4b0e9f2e
+ 422b5e7d39642735eacc85b2a060abb3899ce497
-
+
https://github.com/dotnet/runtime-assets
- 30b6a8d9d3af5681e4caef1ea453619a4b0e9f2e
+ 422b5e7d39642735eacc85b2a060abb3899ce497
https://github.com/dotnet/llvm-project
@@ -282,127 +282,127 @@
https://github.com/dotnet/llvm-project
26f8c30340764cfa7fa9090dc01a36c222bf09c1
-
+
https://github.com/dotnet/runtime
- 85fbd98765c47a867564fff6ae18cc92423cdc66
+ 7745b5ec3db34cd358b26710c0ec32db8b0b23f7
-
+
https://github.com/dotnet/runtime
- 85fbd98765c47a867564fff6ae18cc92423cdc66
+ 7745b5ec3db34cd358b26710c0ec32db8b0b23f7
-
+
https://github.com/dotnet/runtime
- 85fbd98765c47a867564fff6ae18cc92423cdc66
+ 7745b5ec3db34cd358b26710c0ec32db8b0b23f7
-
+
https://github.com/dotnet/runtime
- 85fbd98765c47a867564fff6ae18cc92423cdc66
+ 7745b5ec3db34cd358b26710c0ec32db8b0b23f7
-
+
https://github.com/dotnet/runtime
- 85fbd98765c47a867564fff6ae18cc92423cdc66
+ 7745b5ec3db34cd358b26710c0ec32db8b0b23f7
-
+
https://github.com/dotnet/runtime
- 85fbd98765c47a867564fff6ae18cc92423cdc66
+ 7745b5ec3db34cd358b26710c0ec32db8b0b23f7
-
+
https://github.com/dotnet/runtime
- 85fbd98765c47a867564fff6ae18cc92423cdc66
+ 7745b5ec3db34cd358b26710c0ec32db8b0b23f7
-
+
https://github.com/dotnet/runtime
- 85fbd98765c47a867564fff6ae18cc92423cdc66
+ 7745b5ec3db34cd358b26710c0ec32db8b0b23f7
-
+
https://github.com/dotnet/runtime
- 85fbd98765c47a867564fff6ae18cc92423cdc66
+ 7745b5ec3db34cd358b26710c0ec32db8b0b23f7
-
+
https://github.com/dotnet/xharness
- 50b43ece7daf9f8a88ac16a95a4f8647a4c71c4b
+ ff14b0c0b6d72bf4447d57758a40dbf9494f1ac0
-
+
https://github.com/dotnet/xharness
- 50b43ece7daf9f8a88ac16a95a4f8647a4c71c4b
+ ff14b0c0b6d72bf4447d57758a40dbf9494f1ac0
-
+
https://github.com/dotnet/xharness
- 50b43ece7daf9f8a88ac16a95a4f8647a4c71c4b
+ ff14b0c0b6d72bf4447d57758a40dbf9494f1ac0
-
+
https://github.com/dotnet/arcade
- 8ec8057ac5073b6b2e3fcb0a33d588d2a3357ad3
+ 020255bcf7d0b8beed7de05338d97396982ae527
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-optimization
- 78a5b978e1965c1335edb4b9a22bc4d6ff5a77a6
+ c3acfd159662959ff09f3a0d7663023db48bb78a
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-optimization
- 78a5b978e1965c1335edb4b9a22bc4d6ff5a77a6
+ c3acfd159662959ff09f3a0d7663023db48bb78a
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-optimization
- 78a5b978e1965c1335edb4b9a22bc4d6ff5a77a6
+ c3acfd159662959ff09f3a0d7663023db48bb78a
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-optimization
- 78a5b978e1965c1335edb4b9a22bc4d6ff5a77a6
+ c3acfd159662959ff09f3a0d7663023db48bb78a
-
+
https://github.com/dotnet/hotreload-utils
- 4670b9e37293570f8d93d6af40c4710e2686bf67
+ 249050528f8ac9882f04b2c719bda3e5a532b258
-
+
https://github.com/dotnet/runtime-assets
- 30b6a8d9d3af5681e4caef1ea453619a4b0e9f2e
+ 422b5e7d39642735eacc85b2a060abb3899ce497
-
+
https://github.com/dotnet/roslyn
- ca66296efa86bd8078508fe7b38b91b415364f78
+ 75995e26b4c6f9a30ace7bcb65c0b4e42c0b397c
-
+
https://github.com/dotnet/roslyn
- ca66296efa86bd8078508fe7b38b91b415364f78
+ 75995e26b4c6f9a30ace7bcb65c0b4e42c0b397c
-
+
https://github.com/dotnet/roslyn
- ca66296efa86bd8078508fe7b38b91b415364f78
+ 75995e26b4c6f9a30ace7bcb65c0b4e42c0b397c
-
+
https://github.com/dotnet/roslyn-analyzers
- b07c100bfc66013a8444172d00cfa04c9ceb5a97
+ 8dccccec1ce3bd2fb532ec77d7e092ab9d684db7
-
+
https://github.com/dotnet/roslyn-analyzers
- b07c100bfc66013a8444172d00cfa04c9ceb5a97
+ 8dccccec1ce3bd2fb532ec77d7e092ab9d684db7
-
+
https://github.com/dotnet/roslyn
- ca66296efa86bd8078508fe7b38b91b415364f78
+ 75995e26b4c6f9a30ace7bcb65c0b4e42c0b397c
-
+
https://github.com/dotnet/sdk
- cf8c24575410adf397c0823fd7061f9451049ea1
+ 0f7644da23265f1be382b28ff56f5505b0329334
-
+
https://github.com/dotnet/sdk
- cf8c24575410adf397c0823fd7061f9451049ea1
+ 0f7644da23265f1be382b28ff56f5505b0329334
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-optimization
- 78a5b978e1965c1335edb4b9a22bc4d6ff5a77a6
+ c3acfd159662959ff09f3a0d7663023db48bb78a
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-optimization
- 78a5b978e1965c1335edb4b9a22bc4d6ff5a77a6
+ c3acfd159662959ff09f3a0d7663023db48bb78a
diff --git a/eng/Versions.props b/eng/Versions.props
index 46b53f8ae070c9..3f0cf7c4d63ade 100644
--- a/eng/Versions.props
+++ b/eng/Versions.props
@@ -11,7 +11,7 @@
7.0.$([MSBuild]::Add($([System.Version]::Parse('$(PackageVersionNet8)').Build),14))
6.0.$([MSBuild]::Add($([System.Version]::Parse('$(PackageVersionNet7)').Build),11))
preview
- 4
+ 5
false
release
@@ -34,17 +34,17 @@
- 3.11.0-beta1.24216.2
- 9.0.0-preview.24216.2
+ 3.11.0-beta1.24225.1
+ 9.0.0-preview.24225.1
- 4.11.0-1.24215.10
- 4.11.0-1.24215.10
- 4.11.0-1.24215.10
+ 4.11.0-1.24228.2
+ 4.11.0-1.24228.2
+ 4.11.0-1.24228.2
- 9.0.100-preview.4.24215.1
+ 9.0.100-preview.5.24227.1
- 9.0.0-beta.24217.1
- 9.0.0-beta.24217.1
- 9.0.0-beta.24217.1
- 9.0.0-beta.24217.1
- 2.6.7-beta.24217.1
- 9.0.0-beta.24217.1
- 2.6.7-beta.24217.1
- 9.0.0-beta.24217.1
- 9.0.0-beta.24217.1
- 9.0.0-beta.24217.1
- 9.0.0-beta.24217.1
- 9.0.0-beta.24217.1
- 9.0.0-beta.24217.1
- 9.0.0-beta.24217.1
- 9.0.0-beta.24217.1
- 9.0.0-beta.24217.1
+ 9.0.0-beta.24253.1
+ 9.0.0-beta.24253.1
+ 9.0.0-beta.24253.1
+ 9.0.0-beta.24253.1
+ 2.6.7-beta.24253.1
+ 9.0.0-beta.24253.1
+ 2.6.7-beta.24253.1
+ 9.0.0-beta.24253.1
+ 9.0.0-beta.24253.1
+ 9.0.0-beta.24253.1
+ 9.0.0-beta.24253.1
+ 9.0.0-beta.24253.1
+ 9.0.0-beta.24253.1
+ 9.0.0-beta.24253.1
+ 9.0.0-beta.24253.1
+ 9.0.0-beta.24253.1
1.4.0
6.0.0-preview.1.102
- 9.0.0-preview.4.24215.1
+ 9.0.0-preview.4.24229.1
6.0.0
- 9.0.0-preview.4.24215.1
+ 9.0.0-preview.4.24229.1
6.0.0
1.1.1
@@ -119,46 +119,46 @@
8.0.0
5.0.0
4.5.5
- 9.0.0-preview.4.24215.1
- 9.0.0-preview.4.24215.1
+ 9.0.0-preview.4.24229.1
+ 9.0.0-preview.4.24229.1
6.0.0
5.0.0
5.0.0
5.0.0
7.0.0
- 9.0.0-preview.4.24215.1
+ 9.0.0-preview.4.24229.1
6.0.0
7.0.0
4.5.4
4.5.0
- 9.0.0-preview.4.24215.1
+ 9.0.0-preview.4.24229.1
8.0.0
8.0.0
8.0.0
8.0.0
- 9.0.0-beta.24215.1
- 9.0.0-beta.24215.1
- 9.0.0-beta.24215.1
- 9.0.0-beta.24215.1
- 9.0.0-beta.24215.1
- 9.0.0-beta.24215.1
- 9.0.0-beta.24215.1
- 9.0.0-beta.24215.1
- 9.0.0-beta.24215.1
- 9.0.0-beta.24215.1
- 9.0.0-beta.24215.1
- 9.0.0-beta.24215.1
- 9.0.0-beta.24215.1
- 9.0.0-beta.24215.1
+ 9.0.0-beta.24229.1
+ 9.0.0-beta.24229.1
+ 9.0.0-beta.24229.1
+ 9.0.0-beta.24229.1
+ 9.0.0-beta.24229.1
+ 9.0.0-beta.24229.1
+ 9.0.0-beta.24229.1
+ 9.0.0-beta.24229.1
+ 9.0.0-beta.24229.1
+ 9.0.0-beta.24229.1
+ 9.0.0-beta.24229.1
+ 9.0.0-beta.24229.1
+ 9.0.0-beta.24229.1
+ 9.0.0-beta.24229.1
- 1.0.0-prerelease.24106.4
- 1.0.0-prerelease.24106.4
- 1.0.0-prerelease.24106.4
- 1.0.0-prerelease.24106.4
- 1.0.0-prerelease.24106.4
- 1.0.0-prerelease.24106.4
+ 1.0.0-prerelease.24223.3
+ 1.0.0-prerelease.24223.3
+ 1.0.0-prerelease.24223.3
+ 1.0.0-prerelease.24223.3
+ 1.0.0-prerelease.24223.3
+ 1.0.0-prerelease.24223.3
2.0.0
17.8.0-beta1.23475.2
@@ -179,10 +179,10 @@
1.4.0
17.4.0-preview-20220707-01
- 9.0.0-prerelease.24208.1
- 9.0.0-prerelease.24208.1
- 9.0.0-prerelease.24208.1
- 9.0.0-alpha.0.24215.1
+ 9.0.0-prerelease.24229.1
+ 9.0.0-prerelease.24229.1
+ 9.0.0-prerelease.24229.1
+ 9.0.0-alpha.0.24222.1
3.12.0
4.5.0
6.0.0
@@ -208,11 +208,11 @@
8.0.0-preview-20230918.1
- 0.11.4-alpha.24215.1
+ 0.11.4-alpha.24230.1
- 9.0.0-preview.4.24215.1
+ 9.0.0-preview.4.24229.1
- 9.0.0-preview.4.24215.1
+ 9.0.0-preview.5.24230.1
2.3.5
9.0.0-alpha.1.24167.3
@@ -235,9 +235,9 @@
Note: when the name is updated, make sure to update dependency name in eng/pipelines/common/xplat-setup.yml
like - DarcDependenciesChanged.Microsoft_NET_Workload_Emscripten_Current_Manifest-9_0_100_Transport
-->
- 9.0.0-preview.4.24215.3
+ 9.0.0-preview.5.24223.2
$(MicrosoftNETWorkloadEmscriptenCurrentManifest90100TransportVersion)
- 9.0.0-preview.4.24215.3
+ 9.0.0-preview.5.24223.2
1.1.87-gba258badda
1.0.0-v3.14.0.5722
diff --git a/eng/common/build.ps1 b/eng/common/build.ps1
index 83e6d82e027a82..438f9920c43e4e 100644
--- a/eng/common/build.ps1
+++ b/eng/common/build.ps1
@@ -19,7 +19,6 @@ Param(
[switch] $pack,
[switch] $publish,
[switch] $clean,
- [switch] $verticalBuild,
[switch][Alias('pb')]$productBuild,
[switch][Alias('bl')]$binaryLog,
[switch][Alias('nobl')]$excludeCIBinarylog,
@@ -60,7 +59,6 @@ function Print-Usage() {
Write-Host " -sign Sign build outputs"
Write-Host " -publish Publish artifacts (e.g. symbols)"
Write-Host " -clean Clean the solution"
- Write-Host " -verticalBuild Run in 'vertical build' infra mode."
Write-Host " -productBuild Build the solution in the way it will be built in the full .NET product (VMR) build (short: -pb)"
Write-Host ""
@@ -124,7 +122,7 @@ function Build {
/p:Deploy=$deploy `
/p:Test=$test `
/p:Pack=$pack `
- /p:DotNetBuildRepo=$($productBuild -or $verticalBuild) `
+ /p:DotNetBuildRepo=$productBuild `
/p:IntegrationTest=$integrationTest `
/p:PerformanceTest=$performanceTest `
/p:Sign=$sign `
diff --git a/eng/common/build.sh b/eng/common/build.sh
index d82ebf7428080f..ac1ee8620cd2ab 100755
--- a/eng/common/build.sh
+++ b/eng/common/build.sh
@@ -62,7 +62,6 @@ scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
restore=false
build=false
source_build=false
-vertical_build=false
product_build=false
rebuild=false
test=false
@@ -141,13 +140,6 @@ while [[ $# > 0 ]]; do
restore=true
pack=true
;;
- -verticalbuild|-vb)
- build=true
- vertical_build=true
- product_build=true
- restore=true
- pack=true
- ;;
-test|-t)
test=true
;;
diff --git a/eng/common/core-templates/job/job.yml b/eng/common/core-templates/job/job.yml
new file mode 100644
index 00000000000000..dc3bd560a50e24
--- /dev/null
+++ b/eng/common/core-templates/job/job.yml
@@ -0,0 +1,266 @@
+parameters:
+# Job schema parameters - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
+ cancelTimeoutInMinutes: ''
+ condition: ''
+ container: ''
+ continueOnError: false
+ dependsOn: ''
+ displayName: ''
+ pool: ''
+ steps: []
+ strategy: ''
+ timeoutInMinutes: ''
+ variables: []
+ workspace: ''
+ templateContext: {}
+
+# Job base template specific parameters
+ # See schema documentation - https://github.com/dotnet/arcade/blob/master/Documentation/AzureDevOps/TemplateSchema.md
+ # publishing defaults
+ artifacts: ''
+ enableMicrobuild: false
+ enablePublishBuildArtifacts: false
+ enablePublishBuildAssets: false
+ enablePublishTestResults: false
+ enablePublishUsingPipelines: false
+ enableBuildRetry: false
+ disableComponentGovernance: ''
+ componentGovernanceIgnoreDirectories: ''
+ mergeTestResults: false
+ testRunTitle: ''
+ testResultsFormat: ''
+ name: ''
+ preSteps: []
+ artifactPublishSteps: []
+ runAsPublic: false
+
+# Sbom related params
+ enableSbom: true
+ PackageVersion: 9.0.0
+ BuildDropPath: '$(Build.SourcesDirectory)/artifacts'
+
+# 1es specific parameters
+ is1ESPipeline: ''
+
+jobs:
+- job: ${{ parameters.name }}
+
+ ${{ if ne(parameters.cancelTimeoutInMinutes, '') }}:
+ cancelTimeoutInMinutes: ${{ parameters.cancelTimeoutInMinutes }}
+
+ ${{ if ne(parameters.condition, '') }}:
+ condition: ${{ parameters.condition }}
+
+ ${{ if ne(parameters.container, '') }}:
+ container: ${{ parameters.container }}
+
+ ${{ if ne(parameters.continueOnError, '') }}:
+ continueOnError: ${{ parameters.continueOnError }}
+
+ ${{ if ne(parameters.dependsOn, '') }}:
+ dependsOn: ${{ parameters.dependsOn }}
+
+ ${{ if ne(parameters.displayName, '') }}:
+ displayName: ${{ parameters.displayName }}
+
+ ${{ if ne(parameters.pool, '') }}:
+ pool: ${{ parameters.pool }}
+
+ ${{ if ne(parameters.strategy, '') }}:
+ strategy: ${{ parameters.strategy }}
+
+ ${{ if ne(parameters.timeoutInMinutes, '') }}:
+ timeoutInMinutes: ${{ parameters.timeoutInMinutes }}
+
+ ${{ if ne(parameters.templateContext, '') }}:
+ templateContext: ${{ parameters.templateContext }}
+
+ variables:
+ - ${{ if ne(parameters.enableTelemetry, 'false') }}:
+ - name: DOTNET_CLI_TELEMETRY_PROFILE
+ value: '$(Build.Repository.Uri)'
+ - ${{ if eq(parameters.enableRichCodeNavigation, 'true') }}:
+ - name: EnableRichCodeNavigation
+ value: 'true'
+ # Retry signature validation up to three times, waiting 2 seconds between attempts.
+ # See https://learn.microsoft.com/en-us/nuget/reference/errors-and-warnings/nu3028#retry-untrusted-root-failures
+ - name: NUGET_EXPERIMENTAL_CHAIN_BUILD_RETRY_POLICY
+ value: 3,2000
+ - ${{ each variable in parameters.variables }}:
+ # handle name-value variable syntax
+ # example:
+ # - name: [key]
+ # value: [value]
+ - ${{ if ne(variable.name, '') }}:
+ - name: ${{ variable.name }}
+ value: ${{ variable.value }}
+
+ # handle variable groups
+ - ${{ if ne(variable.group, '') }}:
+ - group: ${{ variable.group }}
+
+ # handle template variable syntax
+ # example:
+ # - template: path/to/template.yml
+ # parameters:
+ # [key]: [value]
+ - ${{ if ne(variable.template, '') }}:
+ - template: ${{ variable.template }}
+ ${{ if ne(variable.parameters, '') }}:
+ parameters: ${{ variable.parameters }}
+
+ # handle key-value variable syntax.
+ # example:
+ # - [key]: [value]
+ - ${{ if and(eq(variable.name, ''), eq(variable.group, ''), eq(variable.template, '')) }}:
+ - ${{ each pair in variable }}:
+ - name: ${{ pair.key }}
+ value: ${{ pair.value }}
+
+ # DotNet-HelixApi-Access provides 'HelixApiAccessToken' for internal builds
+ - ${{ if and(eq(parameters.enableTelemetry, 'true'), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - group: DotNet-HelixApi-Access
+
+ ${{ if ne(parameters.workspace, '') }}:
+ workspace: ${{ parameters.workspace }}
+
+ steps:
+ - ${{ if eq(parameters.is1ESPipeline, '') }}:
+ - 'Illegal entry point, is1ESPipeline is not defined. Repository yaml should not directly reference templates in core-templates folder.': error
+
+ - ${{ if ne(parameters.preSteps, '') }}:
+ - ${{ each preStep in parameters.preSteps }}:
+ - ${{ preStep }}
+
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
+ - task: MicroBuildSigningPlugin@4
+ displayName: Install MicroBuild plugin
+ inputs:
+ signType: $(_SignType)
+ zipSources: false
+ feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json
+ env:
+ TeamName: $(_TeamName)
+ MicroBuildOutputFolderOverride: '$(Agent.TempDirectory)'
+ continueOnError: ${{ parameters.continueOnError }}
+ condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
+
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), eq(variables['System.TeamProject'], 'internal')) }}:
+ - task: NuGetAuthenticate@1
+
+ - ${{ if and(ne(parameters.artifacts.download, 'false'), ne(parameters.artifacts.download, '')) }}:
+ - task: DownloadPipelineArtifact@2
+ inputs:
+ buildType: current
+ artifactName: ${{ coalesce(parameters.artifacts.download.name, 'Artifacts_$(Agent.OS)_$(_BuildConfig)') }}
+ targetPath: ${{ coalesce(parameters.artifacts.download.path, 'artifacts') }}
+ itemPattern: ${{ coalesce(parameters.artifacts.download.pattern, '**') }}
+
+ - ${{ each step in parameters.steps }}:
+ - ${{ step }}
+
+ - ${{ if eq(parameters.enableRichCodeNavigation, true) }}:
+ - task: RichCodeNavIndexer@0
+ displayName: RichCodeNav Upload
+ inputs:
+ languages: ${{ coalesce(parameters.richCodeNavigationLanguage, 'csharp') }}
+ environment: ${{ coalesce(parameters.richCodeNavigationEnvironment, 'internal') }}
+ richNavLogOutputDirectory: $(Build.SourcesDirectory)/artifacts/bin
+ uploadRichNavArtifacts: ${{ coalesce(parameters.richCodeNavigationUploadArtifacts, false) }}
+ continueOnError: true
+
+ - template: /eng/common/core-templates/steps/component-governance.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ ${{ if eq(parameters.disableComponentGovernance, '') }}:
+ ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.runAsPublic, 'false'), or(startsWith(variables['Build.SourceBranch'], 'refs/heads/release/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/dotnet/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/microsoft/'), eq(variables['Build.SourceBranch'], 'refs/heads/main'))) }}:
+ disableComponentGovernance: false
+ ${{ else }}:
+ disableComponentGovernance: true
+ ${{ else }}:
+ disableComponentGovernance: ${{ parameters.disableComponentGovernance }}
+ componentGovernanceIgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
+
+ - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - task: MicroBuildCleanup@1
+ displayName: Execute Microbuild cleanup tasks
+ condition: and(always(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
+ continueOnError: ${{ parameters.continueOnError }}
+ env:
+ TeamName: $(_TeamName)
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.enableSbom, 'true')) }}:
+ - template: /eng/common/core-templates/steps/generate-sbom.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ PackageVersion: ${{ parameters.packageVersion}}
+ BuildDropPath: ${{ parameters.buildDropPath }}
+ IgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
+ publishArtifacts: false
+
+ # Publish test results
+ - ${{ if and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')) }}:
+ - ${{ if eq(parameters.testResultsFormat, 'xunit') }}:
+ - task: PublishTestResults@2
+ displayName: Publish XUnit Test Results
+ inputs:
+ testResultsFormat: 'xUnit'
+ testResultsFiles: '*.xml'
+ searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)'
+ testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-xunit
+ mergeTestResults: ${{ parameters.mergeTestResults }}
+ continueOnError: true
+ condition: always()
+ - ${{ if eq(parameters.testResultsFormat, 'vstest') }}:
+ - task: PublishTestResults@2
+ displayName: Publish TRX Test Results
+ inputs:
+ testResultsFormat: 'VSTest'
+ testResultsFiles: '*.trx'
+ searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)'
+ testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-trx
+ mergeTestResults: ${{ parameters.mergeTestResults }}
+ continueOnError: true
+ condition: always()
+
+ # gather artifacts
+ - ${{ if ne(parameters.artifacts.publish, '') }}:
+ - ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}:
+ - task: CopyFiles@2
+ displayName: Gather binaries for publish to artifacts
+ inputs:
+ SourceFolder: 'artifacts/bin'
+ Contents: '**'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/bin'
+ - task: CopyFiles@2
+ displayName: Gather packages for publish to artifacts
+ inputs:
+ SourceFolder: 'artifacts/packages'
+ Contents: '**'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/packages'
+ - ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}:
+ - task: CopyFiles@2
+ displayName: Gather logs for publish to artifacts
+ inputs:
+ SourceFolder: 'artifacts/log'
+ Contents: '**'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/log'
+
+ - ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}:
+ - task: CopyFiles@2
+ displayName: Gather logs for publish to artifacts
+ inputs:
+ SourceFolder: 'artifacts/log/$(_BuildConfig)'
+ Contents: '**'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/log/$(_BuildConfig)'
+ - ${{ if eq(parameters.enableBuildRetry, 'true') }}:
+ - task: CopyFiles@2
+ displayName: Gather buildconfiguration for build retry
+ inputs:
+ SourceFolder: '$(Build.SourcesDirectory)/eng/common/BuildConfiguration'
+ Contents: '**'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/eng/common/BuildConfiguration'
+
+ - ${{ each step in parameters.artifactPublishSteps }}:
+ - ${{ step }}
diff --git a/eng/common/core-templates/job/onelocbuild.yml b/eng/common/core-templates/job/onelocbuild.yml
new file mode 100644
index 00000000000000..00feec8ebbc3ab
--- /dev/null
+++ b/eng/common/core-templates/job/onelocbuild.yml
@@ -0,0 +1,121 @@
+parameters:
+ # Optional: dependencies of the job
+ dependsOn: ''
+
+ # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
+ pool: ''
+
+ CeapexPat: $(dn-bot-ceapex-package-r) # PAT for the loc AzDO instance https://dev.azure.com/ceapex
+ GithubPat: $(BotAccount-dotnet-bot-repo-PAT)
+
+ SourcesDirectory: $(Build.SourcesDirectory)
+ CreatePr: true
+ AutoCompletePr: false
+ ReusePr: true
+ UseLfLineEndings: true
+ UseCheckedInLocProjectJson: false
+ SkipLocProjectJsonGeneration: false
+ LanguageSet: VS_Main_Languages
+ LclSource: lclFilesInRepo
+ LclPackageId: ''
+ RepoType: gitHub
+ GitHubOrg: dotnet
+ MirrorRepo: ''
+ MirrorBranch: main
+ condition: ''
+ JobNameSuffix: ''
+ is1ESPipeline: ''
+jobs:
+- job: OneLocBuild${{ parameters.JobNameSuffix }}
+
+ dependsOn: ${{ parameters.dependsOn }}
+
+ displayName: OneLocBuild${{ parameters.JobNameSuffix }}
+
+ variables:
+ - group: OneLocBuildVariables # Contains the CeapexPat and GithubPat
+ - name: _GenerateLocProjectArguments
+ value: -SourcesDirectory ${{ parameters.SourcesDirectory }}
+ -LanguageSet "${{ parameters.LanguageSet }}"
+ -CreateNeutralXlfs
+ - ${{ if eq(parameters.UseCheckedInLocProjectJson, 'true') }}:
+ - name: _GenerateLocProjectArguments
+ value: ${{ variables._GenerateLocProjectArguments }} -UseCheckedInLocProjectJson
+ - template: /eng/common/core-templates/variables/pool-providers.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+
+ ${{ if ne(parameters.pool, '') }}:
+ pool: ${{ parameters.pool }}
+ ${{ if eq(parameters.pool, '') }}:
+ pool:
+ # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
+ ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
+ name: AzurePipelines-EO
+ image: 1ESPT-Windows2022
+ demands: Cmd
+ os: windows
+ # If it's not devdiv, it's dnceng
+ ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
+ name: $(DncEngInternalBuildPool)
+ image: 1es-windows-2022
+ os: windows
+
+ steps:
+ - ${{ if eq(parameters.is1ESPipeline, '') }}:
+ - 'Illegal entry point, is1ESPipeline is not defined. Repository yaml should not directly reference templates in core-templates folder.': error
+
+ - ${{ if ne(parameters.SkipLocProjectJsonGeneration, 'true') }}:
+ - task: Powershell@2
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/generate-locproject.ps1
+ arguments: $(_GenerateLocProjectArguments)
+ displayName: Generate LocProject.json
+ condition: ${{ parameters.condition }}
+
+ - task: OneLocBuild@2
+ displayName: OneLocBuild
+ env:
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+ inputs:
+ locProj: eng/Localize/LocProject.json
+ outDir: $(Build.ArtifactStagingDirectory)
+ lclSource: ${{ parameters.LclSource }}
+ lclPackageId: ${{ parameters.LclPackageId }}
+ isCreatePrSelected: ${{ parameters.CreatePr }}
+ isAutoCompletePrSelected: ${{ parameters.AutoCompletePr }}
+ ${{ if eq(parameters.CreatePr, true) }}:
+ isUseLfLineEndingsSelected: ${{ parameters.UseLfLineEndings }}
+ ${{ if eq(parameters.RepoType, 'gitHub') }}:
+ isShouldReusePrSelected: ${{ parameters.ReusePr }}
+ packageSourceAuth: patAuth
+ patVariable: ${{ parameters.CeapexPat }}
+ ${{ if eq(parameters.RepoType, 'gitHub') }}:
+ repoType: ${{ parameters.RepoType }}
+ gitHubPatVariable: "${{ parameters.GithubPat }}"
+ ${{ if ne(parameters.MirrorRepo, '') }}:
+ isMirrorRepoSelected: true
+ gitHubOrganization: ${{ parameters.GitHubOrg }}
+ mirrorRepo: ${{ parameters.MirrorRepo }}
+ mirrorBranch: ${{ parameters.MirrorBranch }}
+ condition: ${{ parameters.condition }}
+
+ - template: /eng/common/core-templates/steps/publish-build-artifacts.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ args:
+ displayName: Publish Localization Files
+ pathToPublish: '$(Build.ArtifactStagingDirectory)/loc'
+ publishLocation: Container
+ artifactName: Loc
+ condition: ${{ parameters.condition }}
+
+ - template: /eng/common/core-templates/steps/publish-build-artifacts.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ args:
+ displayName: Publish LocProject.json
+ pathToPublish: '$(Build.SourcesDirectory)/eng/Localize/'
+ publishLocation: Container
+ artifactName: Loc
+ condition: ${{ parameters.condition }}
\ No newline at end of file
diff --git a/eng/common/core-templates/job/publish-build-assets.yml b/eng/common/core-templates/job/publish-build-assets.yml
new file mode 100644
index 00000000000000..8fe9299542c531
--- /dev/null
+++ b/eng/common/core-templates/job/publish-build-assets.yml
@@ -0,0 +1,172 @@
+parameters:
+ configuration: 'Debug'
+
+ # Optional: condition for the job to run
+ condition: ''
+
+ # Optional: 'true' if future jobs should run even if this job fails
+ continueOnError: false
+
+ # Optional: dependencies of the job
+ dependsOn: ''
+
+ # Optional: Include PublishBuildArtifacts task
+ enablePublishBuildArtifacts: false
+
+ # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
+ pool: {}
+
+ # Optional: should run as a public build even in the internal project
+ # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
+ runAsPublic: false
+
+ # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing
+ publishUsingPipelines: false
+
+ # Optional: whether to publish the build's assets as soon as the publish-to-BAR step completes, rather than in a separate stage
+ publishAssetsImmediately: false
+
+ artifactsPublishingAdditionalParameters: ''
+
+ signingValidationAdditionalParameters: ''
+
+ is1ESPipeline: ''
+
+jobs:
+- job: Asset_Registry_Publish
+
+ dependsOn: ${{ parameters.dependsOn }}
+ timeoutInMinutes: 150
+
+ ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
+ displayName: Publish Assets
+ ${{ else }}:
+ displayName: Publish to Build Asset Registry
+
+ variables:
+ - template: /eng/common/core-templates/variables/pool-providers.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - group: Publish-Build-Assets
+ - group: AzureDevOps-Artifact-Feeds-Pats
+ - name: runCodesignValidationInjection
+ value: false
+ # unconditional - needed for logs publishing (redactor tool version)
+ - template: /eng/common/core-templates/post-build/common-variables.yml
+
+ pool:
+ # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
+ ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
+ name: AzurePipelines-EO
+ image: 1ESPT-Windows2022
+ demands: Cmd
+ os: windows
+ # If it's not devdiv, it's dnceng
+ ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
+ name: NetCore1ESPool-Publishing-Internal
+ image: windows.vs2019.amd64
+ os: windows
+ steps:
+ - ${{ if eq(parameters.is1ESPipeline, '') }}:
+ - 'Illegal entry point, is1ESPipeline is not defined. Repository yaml should not directly reference templates in core-templates folder.': error
+
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - checkout: self
+ fetchDepth: 3
+ clean: true
+
+ - task: DownloadBuildArtifacts@0
+ displayName: Download artifact
+ inputs:
+ artifactName: AssetManifests
+ downloadPath: '$(Build.StagingDirectory)/Download'
+ checkDownloadedFiles: true
+ condition: ${{ parameters.condition }}
+ continueOnError: ${{ parameters.continueOnError }}
+
+ - task: NuGetAuthenticate@1
+
+ - task: PowerShell@2
+ displayName: Publish Build Assets
+ inputs:
+ filePath: eng\common\sdk-task.ps1
+ arguments: -task PublishBuildAssets -restore -msbuildEngine dotnet
+ /p:ManifestsPath='$(Build.StagingDirectory)/Download/AssetManifests'
+ /p:BuildAssetRegistryToken=$(MaestroAccessToken)
+ /p:MaestroApiEndpoint=https://maestro.dot.net
+ /p:PublishUsingPipelines=${{ parameters.publishUsingPipelines }}
+ /p:OfficialBuildId=$(Build.BuildNumber)
+ condition: ${{ parameters.condition }}
+ continueOnError: ${{ parameters.continueOnError }}
+
+ - task: powershell@2
+ displayName: Create ReleaseConfigs Artifact
+ inputs:
+ targetType: inline
+ script: |
+ New-Item -Path "$(Build.StagingDirectory)/ReleaseConfigs" -ItemType Directory -Force
+ $filePath = "$(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt"
+ Add-Content -Path $filePath -Value $(BARBuildId)
+ Add-Content -Path $filePath -Value "$(DefaultChannels)"
+ Add-Content -Path $filePath -Value $(IsStableBuild)
+
+ - template: /eng/common/core-templates/steps/publish-build-artifacts.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ args:
+ displayName: Publish ReleaseConfigs Artifact
+ pathToPublish: '$(Build.StagingDirectory)/ReleaseConfigs'
+ publishLocation: Container
+ artifactName: ReleaseConfigs
+
+ - task: powershell@2
+ displayName: Check if SymbolPublishingExclusionsFile.txt exists
+ inputs:
+ targetType: inline
+ script: |
+ $symbolExclusionfile = "$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt"
+ if(Test-Path -Path $symbolExclusionfile)
+ {
+ Write-Host "SymbolExclusionFile exists"
+ Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]true"
+ }
+ else{
+ Write-Host "Symbols Exclusion file does not exist"
+ Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]false"
+ }
+
+ - template: /eng/common/core-templates/steps/publish-build-artifacts.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ args:
+ displayName: Publish SymbolPublishingExclusionsFile Artifact
+ condition: eq(variables['SymbolExclusionFile'], 'true')
+ pathToPublish: '$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt'
+ publishLocation: Container
+ artifactName: ReleaseConfigs
+
+ - ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
+ - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+
+ - task: PowerShell@2
+ displayName: Publish Using Darc
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1
+ arguments: -BuildId $(BARBuildId)
+ -PublishingInfraVersion 3
+ -AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)'
+ -MaestroToken '$(MaestroApiAccessToken)'
+ -WaitPublishingFinish true
+ -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}'
+ -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}'
+
+ - ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}:
+ - template: /eng/common/core-templates/steps/publish-logs.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ JobLabel: 'Publish_Artifacts_Logs'
diff --git a/eng/common/core-templates/job/source-build.yml b/eng/common/core-templates/job/source-build.yml
new file mode 100644
index 00000000000000..c0ce4b3c861861
--- /dev/null
+++ b/eng/common/core-templates/job/source-build.yml
@@ -0,0 +1,80 @@
+parameters:
+ # This template adds arcade-powered source-build to CI. The template produces a server job with a
+ # default ID 'Source_Build_Complete' to put in a dependency list if necessary.
+
+ # Specifies the prefix for source-build jobs added to pipeline. Use this if disambiguation needed.
+ jobNamePrefix: 'Source_Build'
+
+ # Defines the platform on which to run the job. By default, a linux-x64 machine, suitable for
+ # managed-only repositories. This is an object with these properties:
+ #
+ # name: ''
+ # The name of the job. This is included in the job ID.
+ # targetRID: ''
+ # The name of the target RID to use, instead of the one auto-detected by Arcade.
+ # nonPortable: false
+ # Enables non-portable mode. This means a more specific RID (e.g. fedora.32-x64 rather than
+ # linux-x64), and compiling against distro-provided packages rather than portable ones.
+ # skipPublishValidation: false
+ # Disables publishing validation. By default, a check is performed to ensure no packages are
+ # published by source-build.
+ # container: ''
+ # A container to use. Runs in docker.
+ # pool: {}
+ # A pool to use. Runs directly on an agent.
+ # buildScript: ''
+ # Specifies the build script to invoke to perform the build in the repo. The default
+ # './build.sh' should work for typical Arcade repositories, but this is customizable for
+ # difficult situations.
+ # jobProperties: {}
+ # A list of job properties to inject at the top level, for potential extensibility beyond
+ # container and pool.
+ platform: {}
+
+ is1ESPipeline: ''
+
+jobs:
+- job: ${{ parameters.jobNamePrefix }}_${{ parameters.platform.name }}
+ displayName: Source-Build (${{ parameters.platform.name }})
+
+ ${{ each property in parameters.platform.jobProperties }}:
+ ${{ property.key }}: ${{ property.value }}
+
+ ${{ if ne(parameters.platform.container, '') }}:
+ container: ${{ parameters.platform.container }}
+
+ ${{ if eq(parameters.platform.pool, '') }}:
+ # The default VM host AzDO pool. This should be capable of running Docker containers: almost all
+ # source-build builds run in Docker, including the default managed platform.
+ # /eng/common/core-templates/variables/pool-providers.yml can't be used here (some customers declare variables already), so duplicate its logic
+ ${{ if eq(parameters.is1ESPipeline, 'true') }}:
+ pool:
+ ${{ if eq(variables['System.TeamProject'], 'public') }}:
+ name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore-Svc-Public' ), False, 'NetCore-Public')]
+ demands: ImageOverride -equals build.ubuntu.2004.amd64
+ ${{ if eq(variables['System.TeamProject'], 'internal') }}:
+ name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore1ESPool-Svc-Internal'), False, 'NetCore1ESPool-Internal')]
+ image: 1es-mariner-2
+ os: linux
+ ${{ else }}:
+ pool:
+ ${{ if eq(variables['System.TeamProject'], 'public') }}:
+ name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore-Svc-Public' ), False, 'NetCore-Public')]
+ demands: ImageOverride -equals Build.Ubuntu.2204.Amd64.Open
+ ${{ if eq(variables['System.TeamProject'], 'internal') }}:
+ name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore1ESPool-Svc-Internal'), False, 'NetCore1ESPool-Internal')]
+ demands: ImageOverride -equals Build.Ubuntu.2204.Amd64
+ ${{ if ne(parameters.platform.pool, '') }}:
+ pool: ${{ parameters.platform.pool }}
+
+ workspace:
+ clean: all
+
+ steps:
+ - ${{ if eq(parameters.is1ESPipeline, '') }}:
+ - 'Illegal entry point, is1ESPipeline is not defined. Repository yaml should not directly reference templates in core-templates folder.': error
+
+ - template: /eng/common/core-templates/steps/source-build.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ platform: ${{ parameters.platform }}
diff --git a/eng/common/core-templates/job/source-index-stage1.yml b/eng/common/core-templates/job/source-index-stage1.yml
new file mode 100644
index 00000000000000..9c6e5ae3c3e45a
--- /dev/null
+++ b/eng/common/core-templates/job/source-index-stage1.yml
@@ -0,0 +1,73 @@
+parameters:
+ runAsPublic: false
+ sourceIndexPackageVersion: 1.0.1-20240129.2
+ sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json
+ sourceIndexBuildCommand: powershell -NoLogo -NoProfile -ExecutionPolicy Bypass -Command "eng/common/build.ps1 -restore -build -binarylog -ci"
+ preSteps: []
+ binlogPath: artifacts/log/Debug/Build.binlog
+ condition: ''
+ dependsOn: ''
+ pool: ''
+ is1ESPipeline: ''
+
+jobs:
+- job: SourceIndexStage1
+ dependsOn: ${{ parameters.dependsOn }}
+ condition: ${{ parameters.condition }}
+ variables:
+ - name: SourceIndexPackageVersion
+ value: ${{ parameters.sourceIndexPackageVersion }}
+ - name: SourceIndexPackageSource
+ value: ${{ parameters.sourceIndexPackageSource }}
+ - name: BinlogPath
+ value: ${{ parameters.binlogPath }}
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - group: source-dot-net stage1 variables
+ - template: /eng/common/core-templates/variables/pool-providers.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+
+ ${{ if ne(parameters.pool, '') }}:
+ pool: ${{ parameters.pool }}
+ ${{ if eq(parameters.pool, '') }}:
+ pool:
+ ${{ if eq(variables['System.TeamProject'], 'public') }}:
+ name: $(DncEngPublicBuildPool)
+ image: windows.vs2022.amd64.open
+ ${{ if eq(variables['System.TeamProject'], 'internal') }}:
+ name: $(DncEngInternalBuildPool)
+ image: windows.vs2022.amd64
+
+ steps:
+ - ${{ if eq(parameters.is1ESPipeline, '') }}:
+ - 'Illegal entry point, is1ESPipeline is not defined. Repository yaml should not directly reference templates in core-templates folder.': error
+
+ - ${{ each preStep in parameters.preSteps }}:
+ - ${{ preStep }}
+
+ - task: UseDotNet@2
+ displayName: Use .NET 8 SDK
+ inputs:
+ packageType: sdk
+ version: 8.0.x
+ installationPath: $(Agent.TempDirectory)/dotnet
+ workingDirectory: $(Agent.TempDirectory)
+
+ - script: |
+ $(Agent.TempDirectory)/dotnet/dotnet tool install BinLogToSln --version $(SourceIndexPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools
+ $(Agent.TempDirectory)/dotnet/dotnet tool install UploadIndexStage1 --version $(SourceIndexPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools
+ displayName: Download Tools
+ # Set working directory to temp directory so 'dotnet' doesn't try to use global.json and use the repo's sdk.
+ workingDirectory: $(Agent.TempDirectory)
+
+ - script: ${{ parameters.sourceIndexBuildCommand }}
+ displayName: Build Repository
+
+ - script: $(Agent.TempDirectory)/.source-index/tools/BinLogToSln -i $(BinlogPath) -r $(Build.SourcesDirectory) -n $(Build.Repository.Name) -o .source-index/stage1output
+ displayName: Process Binlog into indexable sln
+
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - script: $(Agent.TempDirectory)/.source-index/tools/UploadIndexStage1 -i .source-index/stage1output -n $(Build.Repository.Name)
+ displayName: Upload stage1 artifacts to source index
+ env:
+ BLOB_CONTAINER_URL: $(source-dot-net-stage1-blob-container-url)
diff --git a/eng/common/core-templates/jobs/codeql-build.yml b/eng/common/core-templates/jobs/codeql-build.yml
new file mode 100644
index 00000000000000..f2144252cc65c8
--- /dev/null
+++ b/eng/common/core-templates/jobs/codeql-build.yml
@@ -0,0 +1,33 @@
+parameters:
+ # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md
+ continueOnError: false
+ # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
+ jobs: []
+ # Optional: if specified, restore and use this version of Guardian instead of the default.
+ overrideGuardianVersion: ''
+ is1ESPipeline: ''
+
+jobs:
+- template: /eng/common/core-templates/jobs/jobs.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ enableMicrobuild: false
+ enablePublishBuildArtifacts: false
+ enablePublishTestResults: false
+ enablePublishBuildAssets: false
+ enablePublishUsingPipelines: false
+ enableTelemetry: true
+
+ variables:
+ - group: Publish-Build-Assets
+ # The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in
+ # sync with the packages.config file.
+ - name: DefaultGuardianVersion
+ value: 0.109.0
+ - name: GuardianPackagesConfigFile
+ value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config
+ - name: GuardianVersion
+ value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }}
+
+ jobs: ${{ parameters.jobs }}
+
diff --git a/eng/common/core-templates/jobs/jobs.yml b/eng/common/core-templates/jobs/jobs.yml
new file mode 100644
index 00000000000000..ea69be4341c62f
--- /dev/null
+++ b/eng/common/core-templates/jobs/jobs.yml
@@ -0,0 +1,119 @@
+parameters:
+ # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md
+ continueOnError: false
+
+ # Optional: Include PublishBuildArtifacts task
+ enablePublishBuildArtifacts: false
+
+ # Optional: Enable publishing using release pipelines
+ enablePublishUsingPipelines: false
+
+ # Optional: Enable running the source-build jobs to build repo from source
+ enableSourceBuild: false
+
+ # Optional: Parameters for source-build template.
+ # See /eng/common/core-templates/jobs/source-build.yml for options
+ sourceBuildParameters: []
+
+ graphFileGeneration:
+ # Optional: Enable generating the graph files at the end of the build
+ enabled: false
+ # Optional: Include toolset dependencies in the generated graph files
+ includeToolset: false
+
+ # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
+ jobs: []
+
+ # Optional: Override automatically derived dependsOn value for "publish build assets" job
+ publishBuildAssetsDependsOn: ''
+
+ # Optional: Publish the assets as soon as the publish to BAR stage is complete, rather than doing so in a separate stage.
+ publishAssetsImmediately: false
+
+ # Optional: If using publishAssetsImmediately and additional parameters are needed, can be used to send along additional parameters (normally sent to post-build.yml)
+ artifactsPublishingAdditionalParameters: ''
+ signingValidationAdditionalParameters: ''
+
+ # Optional: should run as a public build even in the internal project
+ # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
+ runAsPublic: false
+
+ enableSourceIndex: false
+ sourceIndexParams: {}
+
+ artifacts: {}
+ is1ESPipeline: ''
+
+# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
+# and some (Microbuild) should only be applied to non-PR cases for internal builds.
+
+jobs:
+- ${{ each job in parameters.jobs }}:
+ - ${{ if eq(parameters.is1ESPipeline, 'true') }}:
+ - template: /eng/common/templates-official/job/job.yml
+ parameters:
+ # pass along parameters
+ ${{ each parameter in parameters }}:
+ ${{ if ne(parameter.key, 'jobs') }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
+
+ # pass along job properties
+ ${{ each property in job }}:
+ ${{ if ne(property.key, 'job') }}:
+ ${{ property.key }}: ${{ property.value }}
+
+ name: ${{ job.job }}
+
+ - ${{ else }}:
+ - template: /eng/common/templates/job/job.yml
+ parameters:
+ # pass along parameters
+ ${{ each parameter in parameters }}:
+ ${{ if ne(parameter.key, 'jobs') }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
+
+ # pass along job properties
+ ${{ each property in job }}:
+ ${{ if ne(property.key, 'job') }}:
+ ${{ property.key }}: ${{ property.value }}
+
+ name: ${{ job.job }}
+
+- ${{ if eq(parameters.enableSourceBuild, true) }}:
+ - template: /eng/common/core-templates/jobs/source-build.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ allCompletedJobId: Source_Build_Complete
+ ${{ each parameter in parameters.sourceBuildParameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
+
+- ${{ if eq(parameters.enableSourceIndex, 'true') }}:
+ - template: ../job/source-index-stage1.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ runAsPublic: ${{ parameters.runAsPublic }}
+ ${{ each parameter in parameters.sourceIndexParams }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
+
+- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - ${{ if or(eq(parameters.enablePublishBuildAssets, true), eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, '')) }}:
+ - template: ../job/publish-build-assets.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ continueOnError: ${{ parameters.continueOnError }}
+ dependsOn:
+ - ${{ if ne(parameters.publishBuildAssetsDependsOn, '') }}:
+ - ${{ each job in parameters.publishBuildAssetsDependsOn }}:
+ - ${{ job.job }}
+ - ${{ if eq(parameters.publishBuildAssetsDependsOn, '') }}:
+ - ${{ each job in parameters.jobs }}:
+ - ${{ job.job }}
+ - ${{ if eq(parameters.enableSourceBuild, true) }}:
+ - Source_Build_Complete
+
+ runAsPublic: ${{ parameters.runAsPublic }}
+ publishUsingPipelines: ${{ parameters.enablePublishUsingPipelines }}
+ publishAssetsImmediately: ${{ parameters.publishAssetsImmediately }}
+ enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }}
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ signingValidationAdditionalParameters: ${{ parameters.signingValidationAdditionalParameters }}
diff --git a/eng/common/core-templates/jobs/source-build.yml b/eng/common/core-templates/jobs/source-build.yml
new file mode 100644
index 00000000000000..d8e5d008522682
--- /dev/null
+++ b/eng/common/core-templates/jobs/source-build.yml
@@ -0,0 +1,50 @@
+parameters:
+ # This template adds arcade-powered source-build to CI. A job is created for each platform, as
+ # well as an optional server job that completes when all platform jobs complete.
+
+ # The name of the "join" job for all source-build platforms. If set to empty string, the job is
+ # not included. Existing repo pipelines can use this job to depend on all source-build jobs
+ # completing without maintaining a separate list of every single job ID: just depend on this one
+ # server job. By default, not included. Recommended name if used: 'Source_Build_Complete'.
+ allCompletedJobId: ''
+
+ # See /eng/common/core-templates/job/source-build.yml
+ jobNamePrefix: 'Source_Build'
+
+ # This is the default platform provided by Arcade, intended for use by a managed-only repo.
+ defaultManagedPlatform:
+ name: 'Managed'
+ container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream9'
+
+ # Defines the platforms on which to run build jobs. One job is created for each platform, and the
+ # object in this array is sent to the job template as 'platform'. If no platforms are specified,
+ # one job runs on 'defaultManagedPlatform'.
+ platforms: []
+
+ is1ESPipeline: ''
+
+jobs:
+
+- ${{ if ne(parameters.allCompletedJobId, '') }}:
+ - job: ${{ parameters.allCompletedJobId }}
+ displayName: Source-Build Complete
+ pool: server
+ dependsOn:
+ - ${{ each platform in parameters.platforms }}:
+ - ${{ parameters.jobNamePrefix }}_${{ platform.name }}
+ - ${{ if eq(length(parameters.platforms), 0) }}:
+ - ${{ parameters.jobNamePrefix }}_${{ parameters.defaultManagedPlatform.name }}
+
+- ${{ each platform in parameters.platforms }}:
+ - template: /eng/common/core-templates/job/source-build.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ jobNamePrefix: ${{ parameters.jobNamePrefix }}
+ platform: ${{ platform }}
+
+- ${{ if eq(length(parameters.platforms), 0) }}:
+ - template: /eng/common/core-templates/job/source-build.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ jobNamePrefix: ${{ parameters.jobNamePrefix }}
+ platform: ${{ parameters.defaultManagedPlatform }}
diff --git a/eng/common/core-templates/post-build/common-variables.yml b/eng/common/core-templates/post-build/common-variables.yml
new file mode 100644
index 00000000000000..b9ede10bf099ae
--- /dev/null
+++ b/eng/common/core-templates/post-build/common-variables.yml
@@ -0,0 +1,24 @@
+variables:
+ - group: Publish-Build-Assets
+
+ # Whether the build is internal or not
+ - name: IsInternalBuild
+ value: ${{ and(ne(variables['System.TeamProject'], 'public'), contains(variables['Build.SourceBranch'], 'internal')) }}
+
+ # Default Maestro++ API Endpoint and API Version
+ - name: MaestroApiEndPoint
+ value: "https://maestro.dot.net"
+ - name: MaestroApiAccessToken
+ value: $(MaestroAccessToken)
+ - name: MaestroApiVersion
+ value: "2020-02-20"
+
+ - name: SourceLinkCLIVersion
+ value: 3.0.0
+ - name: SymbolToolVersion
+ value: 1.0.1
+ - name: BinlogToolVersion
+ value: 1.0.11
+
+ - name: runCodesignValidationInjection
+ value: false
diff --git a/eng/common/core-templates/post-build/post-build.yml b/eng/common/core-templates/post-build/post-build.yml
new file mode 100644
index 00000000000000..ed1e6692f739b2
--- /dev/null
+++ b/eng/common/core-templates/post-build/post-build.yml
@@ -0,0 +1,298 @@
+parameters:
+ # Which publishing infra should be used. THIS SHOULD MATCH THE VERSION ON THE BUILD MANIFEST.
+ # Publishing V1 is no longer supported
+ # Publishing V2 is no longer supported
+ # Publishing V3 is the default
+ - name: publishingInfraVersion
+ displayName: Which version of publishing should be used to promote the build definition?
+ type: number
+ default: 3
+ values:
+ - 3
+
+ - name: BARBuildId
+ displayName: BAR Build Id
+ type: number
+ default: 0
+
+ - name: PromoteToChannelIds
+ displayName: Channel to promote BARBuildId to
+ type: string
+ default: ''
+
+ - name: enableSourceLinkValidation
+ displayName: Enable SourceLink validation
+ type: boolean
+ default: false
+
+ - name: enableSigningValidation
+ displayName: Enable signing validation
+ type: boolean
+ default: true
+
+ - name: enableSymbolValidation
+ displayName: Enable symbol validation
+ type: boolean
+ default: false
+
+ - name: enableNugetValidation
+ displayName: Enable NuGet validation
+ type: boolean
+ default: true
+
+ - name: publishInstallersAndChecksums
+ displayName: Publish installers and checksums
+ type: boolean
+ default: true
+
+ - name: SDLValidationParameters
+ type: object
+ default:
+ enable: false
+ publishGdn: false
+ continueOnError: false
+ params: ''
+ artifactNames: ''
+ downloadArtifacts: true
+
+ # These parameters let the user customize the call to sdk-task.ps1 for publishing
+ # symbols & general artifacts as well as for signing validation
+ - name: symbolPublishingAdditionalParameters
+ displayName: Symbol publishing additional parameters
+ type: string
+ default: ''
+
+ - name: artifactsPublishingAdditionalParameters
+ displayName: Artifact publishing additional parameters
+ type: string
+ default: ''
+
+ - name: signingValidationAdditionalParameters
+ displayName: Signing validation additional parameters
+ type: string
+ default: ''
+
+ # Which stages should finish execution before post-build stages start
+ - name: validateDependsOn
+ type: object
+ default:
+ - build
+
+ - name: publishDependsOn
+ type: object
+ default:
+ - Validate
+
+ # Optional: Call asset publishing rather than running in a separate stage
+ - name: publishAssetsImmediately
+ type: boolean
+ default: false
+
+ - name: is1ESPipeline
+ type: boolean
+ default: false
+
+stages:
+- ${{ if or(eq( parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}:
+ - stage: Validate
+ dependsOn: ${{ parameters.validateDependsOn }}
+ displayName: Validate Build Assets
+ variables:
+ - template: /eng/common/core-templates/post-build/common-variables.yml
+ - template: /eng/common/core-templates/variables/pool-providers.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ jobs:
+ - job:
+ displayName: NuGet Validation
+ condition: and(succeededOrFailed(), eq( ${{ parameters.enableNugetValidation }}, 'true'))
+ pool:
+ # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
+ ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
+ name: AzurePipelines-EO
+ image: 1ESPT-Windows2022
+ demands: Cmd
+ os: windows
+ # If it's not devdiv, it's dnceng
+ ${{ else }}:
+ name: $(DncEngInternalBuildPool)
+ image: 1es-windows-2022
+ os: windows
+
+ steps:
+ - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Package Artifacts
+ inputs:
+ buildType: specific
+ buildVersionToDownload: specific
+ project: $(AzDOProjectName)
+ pipeline: $(AzDOPipelineId)
+ buildId: $(AzDOBuildId)
+ artifactName: PackageArtifacts
+ checkDownloadedFiles: true
+
+ - task: PowerShell@2
+ displayName: Validate
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/nuget-validation.ps1
+ arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/
+ -ToolDestinationPath $(Agent.BuildDirectory)/Extract/
+
+ - job:
+ displayName: Signing Validation
+ condition: and( eq( ${{ parameters.enableSigningValidation }}, 'true'), ne( variables['PostBuildSign'], 'true'))
+ pool:
+ # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
+ ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
+ name: AzurePipelines-EO
+ image: 1ESPT-Windows2022
+ demands: Cmd
+ os: windows
+ # If it's not devdiv, it's dnceng
+ ${{ else }}:
+ name: $(DncEngInternalBuildPool)
+ image: 1es-windows-2022
+ os: windows
+ steps:
+ - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Package Artifacts
+ inputs:
+ buildType: specific
+ buildVersionToDownload: specific
+ project: $(AzDOProjectName)
+ pipeline: $(AzDOPipelineId)
+ buildId: $(AzDOBuildId)
+ artifactName: PackageArtifacts
+ checkDownloadedFiles: true
+ itemPattern: |
+ **
+ !**/Microsoft.SourceBuild.Intermediate.*.nupkg
+
+ # This is necessary whenever we want to publish/restore to an AzDO private feed
+ # Since sdk-task.ps1 tries to restore packages we need to do this authentication here
+ # otherwise it'll complain about accessing a private feed.
+ - task: NuGetAuthenticate@1
+ displayName: 'Authenticate to AzDO Feeds'
+
+ # Signing validation will optionally work with the buildmanifest file which is downloaded from
+ # Azure DevOps above.
+ - task: PowerShell@2
+ displayName: Validate
+ inputs:
+ filePath: eng\common\sdk-task.ps1
+ arguments: -task SigningValidation -restore -msbuildEngine vs
+ /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts'
+ /p:SignCheckExclusionsFile='$(Build.SourcesDirectory)/eng/SignCheckExclusionsFile.txt'
+ ${{ parameters.signingValidationAdditionalParameters }}
+
+ - template: /eng/common/core-templates/steps/publish-logs.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ StageLabel: 'Validation'
+ JobLabel: 'Signing'
+ BinlogToolVersion: $(BinlogToolVersion)
+
+ - job:
+ displayName: SourceLink Validation
+ condition: eq( ${{ parameters.enableSourceLinkValidation }}, 'true')
+ pool:
+ # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
+ ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
+ name: AzurePipelines-EO
+ image: 1ESPT-Windows2022
+ demands: Cmd
+ os: windows
+ # If it's not devdiv, it's dnceng
+ ${{ else }}:
+ name: $(DncEngInternalBuildPool)
+ image: 1es-windows-2022
+ os: windows
+ steps:
+ - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Blob Artifacts
+ inputs:
+ buildType: specific
+ buildVersionToDownload: specific
+ project: $(AzDOProjectName)
+ pipeline: $(AzDOPipelineId)
+ buildId: $(AzDOBuildId)
+ artifactName: BlobArtifacts
+ checkDownloadedFiles: true
+
+ - task: PowerShell@2
+ displayName: Validate
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/sourcelink-validation.ps1
+ arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/
+ -ExtractPath $(Agent.BuildDirectory)/Extract/
+ -GHRepoName $(Build.Repository.Name)
+ -GHCommit $(Build.SourceVersion)
+ -SourcelinkCliVersion $(SourceLinkCLIVersion)
+ continueOnError: true
+
+- ${{ if ne(parameters.publishAssetsImmediately, 'true') }}:
+ - stage: publish_using_darc
+ ${{ if or(eq(parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}:
+ dependsOn: ${{ parameters.publishDependsOn }}
+ ${{ else }}:
+ dependsOn: ${{ parameters.validateDependsOn }}
+ displayName: Publish using Darc
+ variables:
+ - template: /eng/common/core-templates/post-build/common-variables.yml
+ - template: /eng/common/core-templates/variables/pool-providers.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ jobs:
+ - job:
+ displayName: Publish Using Darc
+ timeoutInMinutes: 120
+ pool:
+ # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
+ ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
+ name: AzurePipelines-EO
+ image: 1ESPT-Windows2022
+ demands: Cmd
+ os: windows
+ # If it's not devdiv, it's dnceng
+ ${{ else }}:
+ name: NetCore1ESPool-Publishing-Internal
+ image: windows.vs2019.amd64
+ os: windows
+ steps:
+ - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+
+ - task: NuGetAuthenticate@1
+
+ - task: PowerShell@2
+ displayName: Publish Using Darc
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1
+ arguments: -BuildId $(BARBuildId)
+ -PublishingInfraVersion ${{ parameters.publishingInfraVersion }}
+ -AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)'
+ -MaestroToken '$(MaestroApiAccessToken)'
+ -WaitPublishingFinish true
+ -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}'
+ -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}'
diff --git a/eng/common/core-templates/post-build/setup-maestro-vars.yml b/eng/common/core-templates/post-build/setup-maestro-vars.yml
new file mode 100644
index 00000000000000..8d56b5726793f8
--- /dev/null
+++ b/eng/common/core-templates/post-build/setup-maestro-vars.yml
@@ -0,0 +1,74 @@
+parameters:
+ BARBuildId: ''
+ PromoteToChannelIds: ''
+ is1ESPipeline: ''
+
+steps:
+ - ${{ if eq(parameters.is1ESPipeline, '') }}:
+ - 'Illegal entry point, is1ESPipeline is not defined. Repository yaml should not directly reference templates in core-templates folder.': error
+
+ - ${{ if eq(coalesce(parameters.PromoteToChannelIds, 0), 0) }}:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Release Configs
+ inputs:
+ buildType: current
+ artifactName: ReleaseConfigs
+ checkDownloadedFiles: true
+
+ - task: PowerShell@2
+ name: setReleaseVars
+ displayName: Set Release Configs Vars
+ inputs:
+ targetType: inline
+ pwsh: true
+ script: |
+ try {
+ if (!$Env:PromoteToMaestroChannels -or $Env:PromoteToMaestroChannels.Trim() -eq '') {
+ $Content = Get-Content $(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt
+
+ $BarId = $Content | Select -Index 0
+ $Channels = $Content | Select -Index 1
+ $IsStableBuild = $Content | Select -Index 2
+
+ $AzureDevOpsProject = $Env:System_TeamProject
+ $AzureDevOpsBuildDefinitionId = $Env:System_DefinitionId
+ $AzureDevOpsBuildId = $Env:Build_BuildId
+ }
+ else {
+ $buildApiEndpoint = "${Env:MaestroApiEndPoint}/api/builds/${Env:BARBuildId}?api-version=${Env:MaestroApiVersion}"
+
+ $apiHeaders = New-Object 'System.Collections.Generic.Dictionary[[String],[String]]'
+ $apiHeaders.Add('Accept', 'application/json')
+ $apiHeaders.Add('Authorization',"Bearer ${Env:MAESTRO_API_TOKEN}")
+
+ $buildInfo = try { Invoke-WebRequest -Method Get -Uri $buildApiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" }
+
+ $BarId = $Env:BARBuildId
+ $Channels = $Env:PromoteToMaestroChannels -split ","
+ $Channels = $Channels -join "]["
+ $Channels = "[$Channels]"
+
+ $IsStableBuild = $buildInfo.stable
+ $AzureDevOpsProject = $buildInfo.azureDevOpsProject
+ $AzureDevOpsBuildDefinitionId = $buildInfo.azureDevOpsBuildDefinitionId
+ $AzureDevOpsBuildId = $buildInfo.azureDevOpsBuildId
+ }
+
+ Write-Host "##vso[task.setvariable variable=BARBuildId]$BarId"
+ Write-Host "##vso[task.setvariable variable=TargetChannels]$Channels"
+ Write-Host "##vso[task.setvariable variable=IsStableBuild]$IsStableBuild"
+
+ Write-Host "##vso[task.setvariable variable=AzDOProjectName]$AzureDevOpsProject"
+ Write-Host "##vso[task.setvariable variable=AzDOPipelineId]$AzureDevOpsBuildDefinitionId"
+ Write-Host "##vso[task.setvariable variable=AzDOBuildId]$AzureDevOpsBuildId"
+ }
+ catch {
+ Write-Host $_
+ Write-Host $_.Exception
+ Write-Host $_.ScriptStackTrace
+ exit 1
+ }
+ env:
+ MAESTRO_API_TOKEN: $(MaestroApiAccessToken)
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToMaestroChannels: ${{ parameters.PromoteToChannelIds }}
diff --git a/eng/common/core-templates/post-build/trigger-subscription.yml b/eng/common/core-templates/post-build/trigger-subscription.yml
new file mode 100644
index 00000000000000..da669030daf6e9
--- /dev/null
+++ b/eng/common/core-templates/post-build/trigger-subscription.yml
@@ -0,0 +1,13 @@
+parameters:
+ ChannelId: 0
+
+steps:
+- task: PowerShell@2
+ displayName: Triggering subscriptions
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/trigger-subscriptions.ps1
+ arguments: -SourceRepo $(Build.Repository.Uri)
+ -ChannelId ${{ parameters.ChannelId }}
+ -MaestroApiAccessToken $(MaestroAccessToken)
+ -MaestroApiEndPoint $(MaestroApiEndPoint)
+ -MaestroApiVersion $(MaestroApiVersion)
diff --git a/eng/common/core-templates/steps/add-build-to-channel.yml b/eng/common/core-templates/steps/add-build-to-channel.yml
new file mode 100644
index 00000000000000..f67a210d62f3e5
--- /dev/null
+++ b/eng/common/core-templates/steps/add-build-to-channel.yml
@@ -0,0 +1,13 @@
+parameters:
+ ChannelId: 0
+
+steps:
+- task: PowerShell@2
+ displayName: Add Build to Channel
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/add-build-to-channel.ps1
+ arguments: -BuildId $(BARBuildId)
+ -ChannelId ${{ parameters.ChannelId }}
+ -MaestroApiAccessToken $(MaestroApiAccessToken)
+ -MaestroApiEndPoint $(MaestroApiEndPoint)
+ -MaestroApiVersion $(MaestroApiVersion)
diff --git a/eng/common/core-templates/steps/component-governance.yml b/eng/common/core-templates/steps/component-governance.yml
new file mode 100644
index 00000000000000..df449a34c11207
--- /dev/null
+++ b/eng/common/core-templates/steps/component-governance.yml
@@ -0,0 +1,14 @@
+parameters:
+ disableComponentGovernance: false
+ componentGovernanceIgnoreDirectories: ''
+ is1ESPipeline: false
+
+steps:
+- ${{ if eq(parameters.disableComponentGovernance, 'true') }}:
+ - script: echo "##vso[task.setvariable variable=skipComponentGovernanceDetection]true"
+ displayName: Set skipComponentGovernanceDetection variable
+- ${{ if ne(parameters.disableComponentGovernance, 'true') }}:
+ - task: ComponentGovernanceComponentDetection@0
+ continueOnError: true
+ inputs:
+ ignoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
\ No newline at end of file
diff --git a/eng/common/core-templates/steps/generate-sbom.yml b/eng/common/core-templates/steps/generate-sbom.yml
new file mode 100644
index 00000000000000..d938b60e1bb534
--- /dev/null
+++ b/eng/common/core-templates/steps/generate-sbom.yml
@@ -0,0 +1,54 @@
+# BuildDropPath - The root folder of the drop directory for which the manifest file will be generated.
+# PackageName - The name of the package this SBOM represents.
+# PackageVersion - The version of the package this SBOM represents.
+# ManifestDirPath - The path of the directory where the generated manifest files will be placed
+# IgnoreDirectories - Directories to ignore for SBOM generation. This will be passed through to the CG component detector.
+
+parameters:
+ PackageVersion: 9.0.0
+ BuildDropPath: '$(Build.SourcesDirectory)/artifacts'
+ PackageName: '.NET'
+ ManifestDirPath: $(Build.ArtifactStagingDirectory)/sbom
+ IgnoreDirectories: ''
+ sbomContinueOnError: true
+ is1ESPipeline: false
+ # disable publishArtifacts if some other step is publishing the artifacts (like job.yml).
+ publishArtifacts: true
+
+steps:
+- task: PowerShell@2
+ displayName: Prep for SBOM generation in (Non-linux)
+ condition: or(eq(variables['Agent.Os'], 'Windows_NT'), eq(variables['Agent.Os'], 'Darwin'))
+ inputs:
+ filePath: ./eng/common/generate-sbom-prep.ps1
+ arguments: ${{parameters.manifestDirPath}}
+
+# Chmodding is a workaround for https://github.com/dotnet/arcade/issues/8461
+- script: |
+ chmod +x ./eng/common/generate-sbom-prep.sh
+ ./eng/common/generate-sbom-prep.sh ${{parameters.manifestDirPath}}
+ displayName: Prep for SBOM generation in (Linux)
+ condition: eq(variables['Agent.Os'], 'Linux')
+ continueOnError: ${{ parameters.sbomContinueOnError }}
+
+- task: AzureArtifacts.manifest-generator-task.manifest-generator-task.ManifestGeneratorTask@0
+ displayName: 'Generate SBOM manifest'
+ continueOnError: ${{ parameters.sbomContinueOnError }}
+ inputs:
+ PackageName: ${{ parameters.packageName }}
+ BuildDropPath: ${{ parameters.buildDropPath }}
+ PackageVersion: ${{ parameters.packageVersion }}
+ ManifestDirPath: ${{ parameters.manifestDirPath }}
+ ${{ if ne(parameters.IgnoreDirectories, '') }}:
+ AdditionalComponentDetectorArgs: '--IgnoreDirectories ${{ parameters.IgnoreDirectories }}'
+
+- ${{ if eq(parameters.publishArtifacts, 'true')}}:
+ - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ args:
+ displayName: Publish SBOM manifest
+ continueOnError: ${{parameters.sbomContinueOnError}}
+ targetPath: '${{ parameters.manifestDirPath }}'
+ artifactName: $(ARTIFACT_NAME)
+
diff --git a/eng/common/core-templates/steps/publish-build-artifacts.yml b/eng/common/core-templates/steps/publish-build-artifacts.yml
new file mode 100644
index 00000000000000..f24ce346684e60
--- /dev/null
+++ b/eng/common/core-templates/steps/publish-build-artifacts.yml
@@ -0,0 +1,20 @@
+parameters:
+- name: is1ESPipeline
+ type: boolean
+ default: false
+- name: args
+ type: object
+ default: {}
+steps:
+- ${{ if ne(parameters.is1ESPipeline, true) }}:
+ - template: /eng/common/templates/steps/publish-build-artifacts.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ ${{ each parameter in parameters.args }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
+- ${{ else }}:
+ - template: /eng/common/templates-official/steps/publish-build-artifacts.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ ${{ each parameter in parameters.args }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
\ No newline at end of file
diff --git a/eng/common/core-templates/steps/publish-logs.yml b/eng/common/core-templates/steps/publish-logs.yml
new file mode 100644
index 00000000000000..8c5ea77b586d27
--- /dev/null
+++ b/eng/common/core-templates/steps/publish-logs.yml
@@ -0,0 +1,59 @@
+parameters:
+ StageLabel: ''
+ JobLabel: ''
+ CustomSensitiveDataList: ''
+ # A default - in case value from eng/common/core-templates/post-build/common-variables.yml is not passed
+ BinlogToolVersion: '1.0.11'
+ is1ESPipeline: false
+
+steps:
+- task: Powershell@2
+ displayName: Prepare Binlogs to Upload
+ inputs:
+ targetType: inline
+ script: |
+ New-Item -ItemType Directory $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
+ Move-Item -Path $(Build.SourcesDirectory)/artifacts/log/Debug/* $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
+ continueOnError: true
+ condition: always()
+
+- task: PowerShell@2
+ displayName: Redact Logs
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/redact-logs.ps1
+ # For now this needs to have explicit list of all sensitive data. Taken from eng/publishing/v3/publish.yml
+ # Sensitive data can as well be added to $(Build.SourcesDirectory)/eng/BinlogSecretsRedactionFile.txt'
+ # If the file exists - sensitive data for redaction will be sourced from it
+ # (single entry per line, lines starting with '# ' are considered comments and skipped)
+ arguments: -InputPath '$(Build.SourcesDirectory)/PostBuildLogs'
+ -BinlogToolVersion ${{parameters.BinlogToolVersion}}
+ -TokensFilePath '$(Build.SourcesDirectory)/eng/BinlogSecretsRedactionFile.txt'
+ '$(publishing-dnceng-devdiv-code-r-build-re)'
+ '$(MaestroAccessToken)'
+ '$(dn-bot-all-orgs-artifact-feeds-rw)'
+ '$(akams-client-id)'
+ '$(akams-client-secret)'
+ '$(microsoft-symbol-server-pat)'
+ '$(symweb-symbol-server-pat)'
+ '$(dn-bot-all-orgs-build-rw-code-rw)'
+ ${{parameters.CustomSensitiveDataList}}
+ continueOnError: true
+ condition: always()
+
+- task: CopyFiles@2
+ displayName: Gather post build logs
+ inputs:
+ SourceFolder: '$(Build.SourcesDirectory)/PostBuildLogs'
+ Contents: '**'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/PostBuildLogs'
+
+- template: /eng/common/core-templates/steps/publish-build-artifacts.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ args:
+ displayName: Publish Logs
+ pathToPublish: '$(Build.ArtifactStagingDirectory)/PostBuildLogs'
+ publishLocation: Container
+ artifactName: PostBuildLogs
+ continueOnError: true
+ condition: always()
diff --git a/eng/common/core-templates/steps/publish-pipeline-artifacts.yml b/eng/common/core-templates/steps/publish-pipeline-artifacts.yml
new file mode 100644
index 00000000000000..2efec04dc2c163
--- /dev/null
+++ b/eng/common/core-templates/steps/publish-pipeline-artifacts.yml
@@ -0,0 +1,20 @@
+parameters:
+- name: is1ESPipeline
+ type: boolean
+ default: false
+
+- name: args
+ type: object
+ default: {}
+
+steps:
+- ${{ if ne(parameters.is1ESPipeline, true) }}:
+ - template: /eng/common/templates/steps/publish-pipeline-artifacts.yml
+ parameters:
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
+- ${{ else }}:
+ - template: /eng/common/templates-official/steps/publish-pipeline-artifacts.yml
+ parameters:
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/core-templates/steps/retain-build.yml b/eng/common/core-templates/steps/retain-build.yml
new file mode 100644
index 00000000000000..83d97a26a01ff9
--- /dev/null
+++ b/eng/common/core-templates/steps/retain-build.yml
@@ -0,0 +1,28 @@
+parameters:
+ # Optional azure devops PAT with build execute permissions for the build's organization,
+ # only needed if the build that should be retained ran on a different organization than
+ # the pipeline where this template is executing from
+ Token: ''
+ # Optional BuildId to retain, defaults to the current running build
+ BuildId: ''
+ # Azure devops Organization URI for the build in the https://dev.azure.com/ format.
+ # Defaults to the organization the current pipeline is running on
+ AzdoOrgUri: '$(System.CollectionUri)'
+ # Azure devops project for the build. Defaults to the project the current pipeline is running on
+ AzdoProject: '$(System.TeamProject)'
+
+steps:
+ - task: powershell@2
+ inputs:
+ targetType: 'filePath'
+ filePath: eng/common/retain-build.ps1
+ pwsh: true
+ arguments: >
+ -AzdoOrgUri: ${{parameters.AzdoOrgUri}}
+ -AzdoProject ${{parameters.AzdoProject}}
+ -Token ${{coalesce(parameters.Token, '$env:SYSTEM_ACCESSTOKEN') }}
+ -BuildId ${{coalesce(parameters.BuildId, '$env:BUILD_ID')}}
+ displayName: Enable permanent build retention
+ env:
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+ BUILD_ID: $(Build.BuildId)
\ No newline at end of file
diff --git a/eng/common/core-templates/steps/send-to-helix.yml b/eng/common/core-templates/steps/send-to-helix.yml
new file mode 100644
index 00000000000000..68fa739c4ab215
--- /dev/null
+++ b/eng/common/core-templates/steps/send-to-helix.yml
@@ -0,0 +1,93 @@
+# Please remember to update the documentation if you make changes to these parameters!
+parameters:
+ HelixSource: 'pr/default' # required -- sources must start with pr/, official/, prodcon/, or agent/
+ HelixType: 'tests/default/' # required -- Helix telemetry which identifies what type of data this is; should include "test" for clarity and must end in '/'
+ HelixBuild: $(Build.BuildNumber) # required -- the build number Helix will use to identify this -- automatically set to the AzDO build number
+ HelixTargetQueues: '' # required -- semicolon-delimited list of Helix queues to test on; see https://helix.dot.net/ for a list of queues
+ HelixAccessToken: '' # required -- access token to make Helix API requests; should be provided by the appropriate variable group
+ HelixProjectPath: 'eng/common/helixpublish.proj' # optional -- path to the project file to build relative to BUILD_SOURCESDIRECTORY
+ HelixProjectArguments: '' # optional -- arguments passed to the build command
+ HelixConfiguration: '' # optional -- additional property attached to a job
+ HelixPreCommands: '' # optional -- commands to run before Helix work item execution
+ HelixPostCommands: '' # optional -- commands to run after Helix work item execution
+ WorkItemDirectory: '' # optional -- a payload directory to zip up and send to Helix; requires WorkItemCommand; incompatible with XUnitProjects
+ WorkItemCommand: '' # optional -- a command to execute on the payload; requires WorkItemDirectory; incompatible with XUnitProjects
+ WorkItemTimeout: '' # optional -- a timeout in TimeSpan.Parse-ready value (e.g. 00:02:00) for the work item command; requires WorkItemDirectory; incompatible with XUnitProjects
+ CorrelationPayloadDirectory: '' # optional -- a directory to zip up and send to Helix as a correlation payload
+ XUnitProjects: '' # optional -- semicolon-delimited list of XUnitProjects to parse and send to Helix; requires XUnitRuntimeTargetFramework, XUnitPublishTargetFramework, XUnitRunnerVersion, and IncludeDotNetCli=true
+ XUnitWorkItemTimeout: '' # optional -- the workitem timeout in seconds for all workitems created from the xUnit projects specified by XUnitProjects
+ XUnitPublishTargetFramework: '' # optional -- framework to use to publish your xUnit projects
+ XUnitRuntimeTargetFramework: '' # optional -- framework to use for the xUnit console runner
+ XUnitRunnerVersion: '' # optional -- version of the xUnit nuget package you wish to use on Helix; required for XUnitProjects
+ IncludeDotNetCli: false # optional -- true will download a version of the .NET CLI onto the Helix machine as a correlation payload; requires DotNetCliPackageType and DotNetCliVersion
+ DotNetCliPackageType: '' # optional -- either 'sdk', 'runtime' or 'aspnetcore-runtime'; determines whether the sdk or runtime will be sent to Helix; see https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json
+ DotNetCliVersion: '' # optional -- version of the CLI to send to Helix; based on this: https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json
+ WaitForWorkItemCompletion: true # optional -- true will make the task wait until work items have been completed and fail the build if work items fail. False is "fire and forget."
+ IsExternal: false # [DEPRECATED] -- doesn't do anything, jobs are external if HelixAccessToken is empty and Creator is set
+ HelixBaseUri: 'https://helix.dot.net/' # optional -- sets the Helix API base URI (allows targeting https://helix.int-dot.net )
+ Creator: '' # optional -- if the build is external, use this to specify who is sending the job
+ DisplayNamePrefix: 'Run Tests' # optional -- rename the beginning of the displayName of the steps in AzDO
+ condition: succeeded() # optional -- condition for step to execute; defaults to succeeded()
+ continueOnError: false # optional -- determines whether to continue the build if the step errors; defaults to false
+
+steps:
+ - powershell: 'powershell "$env:BUILD_SOURCESDIRECTORY\eng\common\msbuild.ps1 $env:BUILD_SOURCESDIRECTORY/${{ parameters.HelixProjectPath }} /restore /p:TreatWarningsAsErrors=false ${{ parameters.HelixProjectArguments }} /t:Test /bl:$env:BUILD_SOURCESDIRECTORY\artifacts\log\$env:BuildConfig\SendToHelix.binlog"'
+ displayName: ${{ parameters.DisplayNamePrefix }} (Windows)
+ env:
+ BuildConfig: $(_BuildConfig)
+ HelixSource: ${{ parameters.HelixSource }}
+ HelixType: ${{ parameters.HelixType }}
+ HelixBuild: ${{ parameters.HelixBuild }}
+ HelixConfiguration: ${{ parameters.HelixConfiguration }}
+ HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
+ HelixAccessToken: ${{ parameters.HelixAccessToken }}
+ HelixPreCommands: ${{ parameters.HelixPreCommands }}
+ HelixPostCommands: ${{ parameters.HelixPostCommands }}
+ WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
+ WorkItemCommand: ${{ parameters.WorkItemCommand }}
+ WorkItemTimeout: ${{ parameters.WorkItemTimeout }}
+ CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
+ XUnitProjects: ${{ parameters.XUnitProjects }}
+ XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }}
+ XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }}
+ XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }}
+ XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }}
+ IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
+ DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
+ DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
+ WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
+ HelixBaseUri: ${{ parameters.HelixBaseUri }}
+ Creator: ${{ parameters.Creator }}
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+ condition: and(${{ parameters.condition }}, eq(variables['Agent.Os'], 'Windows_NT'))
+ continueOnError: ${{ parameters.continueOnError }}
+ - script: $BUILD_SOURCESDIRECTORY/eng/common/msbuild.sh $BUILD_SOURCESDIRECTORY/${{ parameters.HelixProjectPath }} /restore /p:TreatWarningsAsErrors=false ${{ parameters.HelixProjectArguments }} /t:Test /bl:$BUILD_SOURCESDIRECTORY/artifacts/log/$BuildConfig/SendToHelix.binlog
+ displayName: ${{ parameters.DisplayNamePrefix }} (Unix)
+ env:
+ BuildConfig: $(_BuildConfig)
+ HelixSource: ${{ parameters.HelixSource }}
+ HelixType: ${{ parameters.HelixType }}
+ HelixBuild: ${{ parameters.HelixBuild }}
+ HelixConfiguration: ${{ parameters.HelixConfiguration }}
+ HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
+ HelixAccessToken: ${{ parameters.HelixAccessToken }}
+ HelixPreCommands: ${{ parameters.HelixPreCommands }}
+ HelixPostCommands: ${{ parameters.HelixPostCommands }}
+ WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
+ WorkItemCommand: ${{ parameters.WorkItemCommand }}
+ WorkItemTimeout: ${{ parameters.WorkItemTimeout }}
+ CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
+ XUnitProjects: ${{ parameters.XUnitProjects }}
+ XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }}
+ XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }}
+ XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }}
+ XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }}
+ IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
+ DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
+ DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
+ WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
+ HelixBaseUri: ${{ parameters.HelixBaseUri }}
+ Creator: ${{ parameters.Creator }}
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+ condition: and(${{ parameters.condition }}, ne(variables['Agent.Os'], 'Windows_NT'))
+ continueOnError: ${{ parameters.continueOnError }}
diff --git a/eng/common/core-templates/steps/source-build.yml b/eng/common/core-templates/steps/source-build.yml
new file mode 100644
index 00000000000000..bdd725b496f91b
--- /dev/null
+++ b/eng/common/core-templates/steps/source-build.yml
@@ -0,0 +1,134 @@
+parameters:
+ # This template adds arcade-powered source-build to CI.
+
+ # This is a 'steps' template, and is intended for advanced scenarios where the existing build
+ # infra has a careful build methodology that must be followed. For example, a repo
+ # (dotnet/runtime) might choose to clone the GitHub repo only once and store it as a pipeline
+ # artifact for all subsequent jobs to use, to reduce dependence on a strong network connection to
+ # GitHub. Using this steps template leaves room for that infra to be included.
+
+ # Defines the platform on which to run the steps. See 'eng/common/core-templates/job/source-build.yml'
+ # for details. The entire object is described in the 'job' template for simplicity, even though
+ # the usage of the properties on this object is split between the 'job' and 'steps' templates.
+ platform: {}
+ is1ESPipeline: false
+
+steps:
+# Build. Keep it self-contained for simple reusability. (No source-build-specific job variables.)
+- script: |
+ set -x
+ df -h
+
+ # If building on the internal project, the artifact feeds variable may be available (usually only if needed)
+ # In that case, call the feed setup script to add internal feeds corresponding to public ones.
+ # In addition, add an msbuild argument to copy the WIP from the repo to the target build location.
+ # This is because SetupNuGetSources.sh will alter the current NuGet.config file, and we need to preserve those
+ # changes.
+ internalRestoreArgs=
+ if [ '$(dn-bot-dnceng-artifact-feeds-rw)' != '$''(dn-bot-dnceng-artifact-feeds-rw)' ]; then
+ # Temporarily work around https://github.com/dotnet/arcade/issues/7709
+ chmod +x $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh
+ $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh $(Build.SourcesDirectory)/NuGet.config $(dn-bot-dnceng-artifact-feeds-rw)
+ internalRestoreArgs='/p:CopyWipIntoInnerSourceBuildRepo=true'
+
+ # The 'Copy WIP' feature of source build uses git stash to apply changes from the original repo.
+ # This only works if there is a username/email configured, which won't be the case in most CI runs.
+ git config --get user.email
+ if [ $? -ne 0 ]; then
+ git config user.email dn-bot@microsoft.com
+ git config user.name dn-bot
+ fi
+ fi
+
+ # If building on the internal project, the internal storage variable may be available (usually only if needed)
+ # In that case, add variables to allow the download of internal runtimes if the specified versions are not found
+ # in the default public locations.
+ internalRuntimeDownloadArgs=
+ if [ '$(dotnetbuilds-internal-container-read-token-base64)' != '$''(dotnetbuilds-internal-container-read-token-base64)' ]; then
+ internalRuntimeDownloadArgs='/p:DotNetRuntimeSourceFeed=https://dotnetbuilds.blob.core.windows.net/internal /p:DotNetRuntimeSourceFeedKey=$(dotnetbuilds-internal-container-read-token-base64) --runtimesourcefeed https://dotnetbuilds.blob.core.windows.net/internal --runtimesourcefeedkey $(dotnetbuilds-internal-container-read-token-base64)'
+ fi
+
+ buildConfig=Release
+ # Check if AzDO substitutes in a build config from a variable, and use it if so.
+ if [ '$(_BuildConfig)' != '$''(_BuildConfig)' ]; then
+ buildConfig='$(_BuildConfig)'
+ fi
+
+ officialBuildArgs=
+ if [ '${{ and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}' = 'True' ]; then
+ officialBuildArgs='/p:DotNetPublishUsingPipelines=true /p:OfficialBuildId=$(BUILD.BUILDNUMBER)'
+ fi
+
+ targetRidArgs=
+ if [ '${{ parameters.platform.targetRID }}' != '' ]; then
+ targetRidArgs='/p:TargetRid=${{ parameters.platform.targetRID }}'
+ fi
+
+ runtimeOsArgs=
+ if [ '${{ parameters.platform.runtimeOS }}' != '' ]; then
+ runtimeOsArgs='/p:RuntimeOS=${{ parameters.platform.runtimeOS }}'
+ fi
+
+ baseOsArgs=
+ if [ '${{ parameters.platform.baseOS }}' != '' ]; then
+ baseOsArgs='/p:BaseOS=${{ parameters.platform.baseOS }}'
+ fi
+
+ publishArgs=
+ if [ '${{ parameters.platform.skipPublishValidation }}' != 'true' ]; then
+ publishArgs='--publish'
+ fi
+
+ assetManifestFileName=SourceBuild_RidSpecific.xml
+ if [ '${{ parameters.platform.name }}' != '' ]; then
+ assetManifestFileName=SourceBuild_${{ parameters.platform.name }}.xml
+ fi
+
+ ${{ coalesce(parameters.platform.buildScript, './build.sh') }} --ci \
+ --configuration $buildConfig \
+ --restore --build --pack $publishArgs -bl \
+ $officialBuildArgs \
+ $internalRuntimeDownloadArgs \
+ $internalRestoreArgs \
+ $targetRidArgs \
+ $runtimeOsArgs \
+ $baseOsArgs \
+ /p:SourceBuildNonPortable=${{ parameters.platform.nonPortable }} \
+ /p:ArcadeBuildFromSource=true \
+ /p:DotNetBuildSourceOnly=true \
+ /p:DotNetBuildRepo=true \
+ /p:AssetManifestFileName=$assetManifestFileName
+ displayName: Build
+
+# Upload build logs for diagnosis.
+- task: CopyFiles@2
+ displayName: Prepare BuildLogs staging directory
+ inputs:
+ SourceFolder: '$(Build.SourcesDirectory)'
+ Contents: |
+ **/*.log
+ **/*.binlog
+ artifacts/sb/prebuilt-report/**
+ TargetFolder: '$(Build.StagingDirectory)/BuildLogs'
+ CleanTargetFolder: true
+ continueOnError: true
+ condition: succeededOrFailed()
+
+- template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ args:
+ displayName: Publish BuildLogs
+ targetPath: '$(Build.StagingDirectory)/BuildLogs'
+ artifactName: BuildLogs_SourceBuild_${{ parameters.platform.name }}_Attempt$(System.JobAttempt)
+ continueOnError: true
+ condition: succeededOrFailed()
+
+# Manually inject component detection so that we can ignore the source build upstream cache, which contains
+# a nupkg cache of input packages (a local feed).
+# This path must match the upstream cache path in property 'CurrentRepoSourceBuiltNupkgCacheDir'
+# in src\Microsoft.DotNet.Arcade.Sdk\tools\SourceBuild\SourceBuildArcade.targets
+- task: ComponentGovernanceComponentDetection@0
+ displayName: Component Detection (Exclude upstream cache)
+ inputs:
+ ignoreDirectories: '$(Build.SourcesDirectory)/artifacts/sb/src/artifacts/obj/source-built-upstream-cache'
diff --git a/eng/common/core-templates/variables/pool-providers.yml b/eng/common/core-templates/variables/pool-providers.yml
new file mode 100644
index 00000000000000..41053d382a2e10
--- /dev/null
+++ b/eng/common/core-templates/variables/pool-providers.yml
@@ -0,0 +1,8 @@
+parameters:
+ is1ESPipeline: false
+
+variables:
+ - ${{ if eq(parameters.is1ESPipeline, 'true') }}:
+ - template: /eng/common/templates-official/variables/pool-providers.yml
+ - ${{ else }}:
+ - template: /eng/common/templates/variables/pool-providers.yml
\ No newline at end of file
diff --git a/eng/common/cross/toolchain.cmake b/eng/common/cross/toolchain.cmake
index 3762640fdcf792..9a4e285a5ae3f0 100644
--- a/eng/common/cross/toolchain.cmake
+++ b/eng/common/cross/toolchain.cmake
@@ -382,6 +382,26 @@ if(TARGET_ARCH_NAME MATCHES "^(arm|armel|x86)$")
endif()
endif()
+# Set C++ standard library options if specified
+set(CLR_CMAKE_CXX_STANDARD_LIBRARY "" CACHE STRING "Standard library flavor to link against. Only supported with the Clang compiler.")
+if (CLR_CMAKE_CXX_STANDARD_LIBRARY)
+ add_compile_options($<$:--stdlib=${CLR_CMAKE_CXX_STANDARD_LIBRARY}>)
+ add_link_options($<$:--stdlib=${CLR_CMAKE_CXX_STANDARD_LIBRARY}>)
+endif()
+
+option(CLR_CMAKE_CXX_STANDARD_LIBRARY_STATIC "Statically link against the C++ standard library" OFF)
+if(CLR_CMAKE_CXX_STANDARD_LIBRARY_STATIC)
+ add_link_options($<$:-static-libstdc++>)
+endif()
+
+set(CLR_CMAKE_CXX_ABI_LIBRARY "" CACHE STRING "C++ ABI implementation library to link against. Only supported with the Clang compiler.")
+if (CLR_CMAKE_CXX_ABI_LIBRARY)
+ # The user may specify the ABI library with the 'lib' prefix, like 'libstdc++'. Strip the prefix here so the linker finds the right library.
+ string(REGEX REPLACE "^lib(.+)" "\\1" CLR_CMAKE_CXX_ABI_LIBRARY ${CLR_CMAKE_CXX_ABI_LIBRARY})
+ # We need to specify this as a linker-backend option as Clang will filter this option out when linking to libc++.
+ add_link_options("LINKER:-l${CLR_CMAKE_CXX_ABI_LIBRARY}")
+endif()
+
set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
diff --git a/eng/common/native/init-compiler.sh b/eng/common/native/init-compiler.sh
index afdeb7a4d54aee..ccd3a17268e243 100644
--- a/eng/common/native/init-compiler.sh
+++ b/eng/common/native/init-compiler.sh
@@ -64,7 +64,7 @@ if [ -z "$CLR_CC" ]; then
if [ -z "$majorVersion" ]; then
# note: gcc (all versions) and clang versions higher than 6 do not have minor version in file name, if it is zero.
if [ "$compiler" = "clang" ]; then versions="18 17 16 15 14 13 12 11 10 9 8 7 6.0 5.0 4.0 3.9 3.8 3.7 3.6 3.5"
- elif [ "$compiler" = "gcc" ]; then versions="13 12 11 10 9 8 7 6 5 4.9"; fi
+ elif [ "$compiler" = "gcc" ]; then versions="14 13 12 11 10 9 8 7 6 5 4.9"; fi
for version in $versions; do
_major="${version%%.*}"
diff --git a/eng/common/sdk-task.ps1 b/eng/common/sdk-task.ps1
index 091023970f1c9c..aab40de3fd9aca 100644
--- a/eng/common/sdk-task.ps1
+++ b/eng/common/sdk-task.ps1
@@ -64,7 +64,7 @@ try {
$GlobalJson.tools | Add-Member -Name "vs" -Value (ConvertFrom-Json "{ `"version`": `"16.5`" }") -MemberType NoteProperty
}
if( -not ($GlobalJson.tools.PSObject.Properties.Name -match "xcopy-msbuild" )) {
- $GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value "17.8.5" -MemberType NoteProperty
+ $GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value "17.10.0-pre.4.0" -MemberType NoteProperty
}
if ($GlobalJson.tools."xcopy-msbuild".Trim() -ine "none") {
$xcopyMSBuildToolsFolder = InitializeXCopyMSBuild $GlobalJson.tools."xcopy-msbuild" -install $true
diff --git a/eng/common/template-guidance.md b/eng/common/template-guidance.md
new file mode 100644
index 00000000000000..c114bc28dcb95d
--- /dev/null
+++ b/eng/common/template-guidance.md
@@ -0,0 +1,137 @@
+# Overview
+
+Arcade provides templates for public (`/templates`) and 1ES pipeline templates (`/templates-official`) scenarios. Pipelines which are required to be managed by 1ES pipeline templates should reference `/templates-official`; all other pipelines may reference `/templates`.
+
+## How to use
+
+Basic guidance is:
+
+- 1ES Pipeline Template or 1ES Microbuild template runs should reference `eng/common/templates-official`. Any internal production-graded pipeline should use these templates.
+
+- All other runs should reference `eng/common/templates`.
+
+See [azure-pipelines.yml](../../azure-pipelines.yml) (templates-official example) or [azure-pipelines-pr.yml](../../azure-pipelines-pr.yml) (templates example) for examples.
+
+#### The `templateIs1ESManaged` parameter
+
+The `templateIs1ESManaged` parameter is available on most templates and affects which of the variants is used for nested templates. See [Development Notes](#development-notes) below for more information on the `templateIs1ESManaged` parameter.
+
+- For templates under `job/`, `jobs/`, `steps`, or `post-build/`, this parameter must be explicitly set.
+
+## Multiple outputs
+
+1ES pipeline templates impose a policy where every publish artifact execution results in additional security scans being injected into your pipeline. When using `templates-official/jobs/jobs.yml`, Arcade reduces the number of additional security injections by gathering all publishing outputs into the [Build.ArtifactStagingDirectory](https://learn.microsoft.com/en-us/azure/devops/pipelines/build/variables?view=azure-devops&tabs=yaml#build-variables-devops-services), and utilizing the [outputParentDirectory](https://eng.ms/docs/cloud-ai-platform/devdiv/one-engineering-system-1es/1es-docs/1es-pipeline-templates/features/outputs#multiple-outputs) feature of 1ES pipeline templates. When implementing your pipeline, if you ensure publish artifacts are located in the `$(Build.ArtifactStagingDirectory)`, and utilize the 1ES provided template context, then you can reduce the number of security scans for your pipeline.
+
+Example:
+``` yaml
+# azure-pipelines.yml
+extends:
+ template: azure-pipelines/MicroBuild.1ES.Official.yml@MicroBuildTemplate
+ parameters:
+ stages:
+ - stage: build
+ jobs:
+ - template: /eng/common/templates-official/jobs/jobs.yml@self
+ parameters:
+ # 1ES makes use of outputs to reduce security task injection overhead
+ templateContext:
+ outputs:
+ - output: pipelineArtifact
+ displayName: 'Publish logs from source'
+ continueOnError: true
+ condition: always()
+ targetPath: $(Build.ArtifactStagingDirectory)/artifacts/log
+ artifactName: Logs
+ jobs:
+ - job: Windows
+ steps:
+ - script: echo "friendly neighborhood" > artifacts/marvel/spiderman.txt
+ # copy build outputs to artifact staging directory for publishing
+ - task: CopyFiles@2
+ displayName: Gather build output
+ inputs:
+ SourceFolder: '$(Build.SourcesDirectory)/artifacts/marvel'
+ Contents: '**'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/marvel'
+```
+
+Note: Multiple outputs are ONLY applicable to 1ES PT publishing (only usable when referencing `templates-official`).
+
+# Development notes
+
+**Folder / file structure**
+
+``` text
+eng\common\
+ [templates || templates-official]\
+ job\
+ job.yml (shim + artifact publishing logic)
+ onelocbuild.yml (shim)
+ publish-build-assets.yml (shim)
+ source-build.yml (shim)
+ source-index-stage1.yml (shim)
+ jobs\
+ codeql-build.yml (shim)
+ jobs.yml (shim)
+ source-build.yml (shim)
+ post-build\
+ post-build.yml (shim)
+ trigger-subscription.yml (shim)
+ common-variabls.yml (shim)
+ setup-maestro-vars.yml (shim)
+ steps\
+ publish-build-artifacts.yml (logic)
+ publish-pipeline-artifacts.yml (logic)
+ add-build-channel.yml (shim)
+ component-governance.yml (shim)
+ generate-sbom.yml (shim)
+ publish-logs.yml (shim)
+ retain-build.yml (shim)
+ send-to-helix.yml (shim)
+ source-build.yml (shim)
+ variables\
+ pool-providers.yml (logic + redirect) # templates/variables/pool-providers.yml will redirect to templates-official/variables/pool-providers.yml if you are running in the internal project
+ sdl-variables.yml (logic)
+ core-templates\
+ job\
+ job.yml (logic)
+ onelocbuild.yml (logic)
+ publish-build-assets.yml (logic)
+ source-build.yml (logic)
+ source-index-stage1.yml (logic)
+ jobs\
+ codeql-build.yml (logic)
+ jobs.yml (logic)
+ source-build.yml (logic)
+ post-build\
+ common-variabls.yml (logic)
+ post-build.yml (logic)
+ setup-maestro-vars.yml (logic)
+ trigger-subscription.yml (logic)
+ steps\
+ add-build-to-channel.yml (logic)
+ component-governance.yml (logic)
+ generate-sbom.yml (logic)
+ publish-build-artifacts.yml (redirect)
+ publish-logs.yml (logic)
+ publish-pipeline-artifacts.yml (redirect)
+ retain-build.yml (logic)
+ send-to-helix.yml (logic)
+ source-build.yml (logic)
+ variables\
+ pool-providers.yml (redirect)
+```
+
+In the file structure above, a file is designated as "shim", "logic", or "redirect".
+
+- shim - represents a yaml file which is an intermediate step between pipeline logic and .Net Core Engineering's templates (`core-templates`) and defines the `is1ESPipeline` parameter value.
+
+- logic - represents actual base template logic.
+
+- redirect - represents a file in `core-templates` which redirects to the "logic" file in either `templates` or `templates-official`.
+
+Logic for Arcade's templates live **primarily** in the `core-templates` folder. The exceptions to the location of the logic files are around artifact publishing, which is handled differently between 1es pipeline templates and standard templates. `templates` and `templates-official` provide shim entry points which redirect to `core-templates` while also defining the `is1ESPipeline` parameter. If a shim is referenced in `templates`, then `is1ESPipeline` is set to `false`. If a shim is referenced in `templates-official`, then `is1ESPipeline` is set to `true`.
+
+Within `templates` and `templates-official`, the templates at the "stages", and "jobs" / "job" level have been replaced with shims. Templates at the "steps" and "variables" level are typically too granular to be replaced with shims and instead persist logic which is directly applicable to either scenario.
+
+Within `core-templates`, there are a handful of places where logic is dependent on which shim entry point was used. In those places, we redirect back to the respective logic file in `templates` or `templates-official`.
diff --git a/eng/common/templates-official/job/job.yml b/eng/common/templates-official/job/job.yml
index 761acc5eb624c6..4724e9aaa80910 100644
--- a/eng/common/templates-official/job/job.yml
+++ b/eng/common/templates-official/job/job.yml
@@ -1,264 +1,62 @@
-# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
-# and some (Microbuild) should only be applied to non-PR cases for internal builds.
-
-parameters:
-# Job schema parameters - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
- cancelTimeoutInMinutes: ''
- condition: ''
- container: ''
- continueOnError: false
- dependsOn: ''
- displayName: ''
- pool: ''
- steps: []
- strategy: ''
- timeoutInMinutes: ''
- variables: []
- workspace: ''
- templateContext: ''
-
-# Job base template specific parameters
- # See schema documentation - https://github.com/dotnet/arcade/blob/master/Documentation/AzureDevOps/TemplateSchema.md
- artifacts: ''
- enableMicrobuild: false
- enablePublishBuildArtifacts: false
- enablePublishBuildAssets: false
- enablePublishTestResults: false
- enablePublishUsingPipelines: false
- enableBuildRetry: false
- disableComponentGovernance: ''
- componentGovernanceIgnoreDirectories: ''
- mergeTestResults: false
- testRunTitle: ''
- testResultsFormat: ''
- name: ''
- preSteps: []
- runAsPublic: false
-# Sbom related params
- enableSbom: true
- PackageVersion: 7.0.0
- BuildDropPath: '$(Build.SourcesDirectory)/artifacts'
-
jobs:
-- job: ${{ parameters.name }}
-
- ${{ if ne(parameters.cancelTimeoutInMinutes, '') }}:
- cancelTimeoutInMinutes: ${{ parameters.cancelTimeoutInMinutes }}
-
- ${{ if ne(parameters.condition, '') }}:
- condition: ${{ parameters.condition }}
-
- ${{ if ne(parameters.container, '') }}:
- container: ${{ parameters.container }}
-
- ${{ if ne(parameters.continueOnError, '') }}:
- continueOnError: ${{ parameters.continueOnError }}
-
- ${{ if ne(parameters.dependsOn, '') }}:
- dependsOn: ${{ parameters.dependsOn }}
-
- ${{ if ne(parameters.displayName, '') }}:
- displayName: ${{ parameters.displayName }}
-
- ${{ if ne(parameters.pool, '') }}:
- pool: ${{ parameters.pool }}
-
- ${{ if ne(parameters.strategy, '') }}:
- strategy: ${{ parameters.strategy }}
-
- ${{ if ne(parameters.timeoutInMinutes, '') }}:
- timeoutInMinutes: ${{ parameters.timeoutInMinutes }}
-
- ${{ if ne(parameters.templateContext, '') }}:
- templateContext: ${{ parameters.templateContext }}
-
- variables:
- - ${{ if ne(parameters.enableTelemetry, 'false') }}:
- - name: DOTNET_CLI_TELEMETRY_PROFILE
- value: '$(Build.Repository.Uri)'
- - ${{ if eq(parameters.enableRichCodeNavigation, 'true') }}:
- - name: EnableRichCodeNavigation
- value: 'true'
- # Retry signature validation up to three times, waiting 2 seconds between attempts.
- # See https://learn.microsoft.com/en-us/nuget/reference/errors-and-warnings/nu3028#retry-untrusted-root-failures
- - name: NUGET_EXPERIMENTAL_CHAIN_BUILD_RETRY_POLICY
- value: 3,2000
- - ${{ each variable in parameters.variables }}:
- # handle name-value variable syntax
- # example:
- # - name: [key]
- # value: [value]
- - ${{ if ne(variable.name, '') }}:
- - name: ${{ variable.name }}
- value: ${{ variable.value }}
-
- # handle variable groups
- - ${{ if ne(variable.group, '') }}:
- - group: ${{ variable.group }}
-
- # handle template variable syntax
- # example:
- # - template: path/to/template.yml
- # parameters:
- # [key]: [value]
- - ${{ if ne(variable.template, '') }}:
- - template: ${{ variable.template }}
- ${{ if ne(variable.parameters, '') }}:
- parameters: ${{ variable.parameters }}
-
- # handle key-value variable syntax.
- # example:
- # - [key]: [value]
- - ${{ if and(eq(variable.name, ''), eq(variable.group, ''), eq(variable.template, '')) }}:
- - ${{ each pair in variable }}:
- - name: ${{ pair.key }}
- value: ${{ pair.value }}
-
- # DotNet-HelixApi-Access provides 'HelixApiAccessToken' for internal builds
- - ${{ if and(eq(parameters.enableTelemetry, 'true'), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - group: DotNet-HelixApi-Access
-
- ${{ if ne(parameters.workspace, '') }}:
- workspace: ${{ parameters.workspace }}
-
- steps:
- - ${{ if ne(parameters.preSteps, '') }}:
- - ${{ each preStep in parameters.preSteps }}:
- - ${{ preStep }}
-
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
- - task: MicroBuildSigningPlugin@4
- displayName: Install MicroBuild plugin
- inputs:
- signType: $(_SignType)
- zipSources: false
- feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json
- env:
- TeamName: $(_TeamName)
- MicroBuildOutputFolderOverride: '$(Agent.TempDirectory)'
- continueOnError: ${{ parameters.continueOnError }}
- condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
-
- - ${{ if and(eq(parameters.runAsPublic, 'false'), eq(variables['System.TeamProject'], 'internal')) }}:
- - task: NuGetAuthenticate@1
-
- - ${{ if and(ne(parameters.artifacts.download, 'false'), ne(parameters.artifacts.download, '')) }}:
- - task: DownloadPipelineArtifact@2
- inputs:
- buildType: current
- artifactName: ${{ coalesce(parameters.artifacts.download.name, 'Artifacts_$(Agent.OS)_$(_BuildConfig)') }}
- targetPath: ${{ coalesce(parameters.artifacts.download.path, 'artifacts') }}
- itemPattern: ${{ coalesce(parameters.artifacts.download.pattern, '**') }}
-
- - ${{ each step in parameters.steps }}:
- - ${{ step }}
-
- - ${{ if eq(parameters.enableRichCodeNavigation, true) }}:
- - task: RichCodeNavIndexer@0
- displayName: RichCodeNav Upload
- inputs:
- languages: ${{ coalesce(parameters.richCodeNavigationLanguage, 'csharp') }}
- environment: ${{ coalesce(parameters.richCodeNavigationEnvironment, 'internal') }}
- richNavLogOutputDirectory: $(Build.SourcesDirectory)/artifacts/bin
- uploadRichNavArtifacts: ${{ coalesce(parameters.richCodeNavigationUploadArtifacts, false) }}
- continueOnError: true
-
- - template: /eng/common/templates-official/steps/component-governance.yml
- parameters:
- ${{ if eq(parameters.disableComponentGovernance, '') }}:
- ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.runAsPublic, 'false'), or(startsWith(variables['Build.SourceBranch'], 'refs/heads/release/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/dotnet/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/microsoft/'), eq(variables['Build.SourceBranch'], 'refs/heads/main'))) }}:
- disableComponentGovernance: false
- ${{ else }}:
- disableComponentGovernance: true
- ${{ else }}:
- disableComponentGovernance: ${{ parameters.disableComponentGovernance }}
- componentGovernanceIgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
-
- - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - task: MicroBuildCleanup@1
- displayName: Execute Microbuild cleanup tasks
- condition: and(always(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
- continueOnError: ${{ parameters.continueOnError }}
- env:
- TeamName: $(_TeamName)
-
- - ${{ if ne(parameters.artifacts.publish, '') }}:
- - ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}:
- - task: CopyFiles@2
- displayName: Gather binaries for publish to artifacts
- inputs:
- SourceFolder: 'artifacts/bin'
- Contents: '**'
- TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/bin'
- - task: CopyFiles@2
- displayName: Gather packages for publish to artifacts
- inputs:
- SourceFolder: 'artifacts/packages'
- Contents: '**'
- TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/packages'
- - task: 1ES.PublishBuildArtifacts@1
- displayName: Publish pipeline artifacts
- inputs:
- PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts'
- PublishLocation: Container
- ArtifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }}
- continueOnError: true
- condition: always()
- - ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}:
- - task: 1ES.PublishPipelineArtifact@1
- inputs:
- targetPath: 'artifacts/log'
- artifactName: ${{ coalesce(parameters.artifacts.publish.logs.name, 'Logs_Build_$(Agent.Os)_$(_BuildConfig)_Attempt$(System.JobAttempt)') }}
- displayName: 'Publish logs'
- continueOnError: true
- condition: always()
-
- - ${{ if ne(parameters.enablePublishBuildArtifacts, 'false') }}:
- - task: 1ES.PublishBuildArtifacts@1
- displayName: Publish Logs
- inputs:
- PathtoPublish: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)'
- PublishLocation: Container
- ArtifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)' ) }}
- continueOnError: true
- condition: always()
-
- - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'xunit')) }}:
- - task: PublishTestResults@2
- displayName: Publish XUnit Test Results
- inputs:
- testResultsFormat: 'xUnit'
- testResultsFiles: '*.xml'
- searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)'
- testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-xunit
- mergeTestResults: ${{ parameters.mergeTestResults }}
- continueOnError: true
- condition: always()
- - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'vstest')) }}:
- - task: PublishTestResults@2
- displayName: Publish TRX Test Results
- inputs:
- testResultsFormat: 'VSTest'
- testResultsFiles: '*.trx'
- searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)'
- testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-trx
- mergeTestResults: ${{ parameters.mergeTestResults }}
- continueOnError: true
- condition: always()
-
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.enableSbom, 'true')) }}:
- - template: /eng/common/templates-official/steps/generate-sbom.yml
- parameters:
- PackageVersion: ${{ parameters.packageVersion}}
- BuildDropPath: ${{ parameters.buildDropPath }}
- IgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
-
- - ${{ if eq(parameters.enableBuildRetry, 'true') }}:
- - task: 1ES.PublishPipelineArtifact@1
- inputs:
- targetPath: '$(Build.SourcesDirectory)\eng\common\BuildConfiguration'
- artifactName: 'BuildConfiguration'
- displayName: 'Publish build retry configuration'
- continueOnError: true
\ No newline at end of file
+- template: /eng/common/core-templates/job/job.yml
+ parameters:
+ is1ESPipeline: true
+
+ # publish artifacts
+ # for 1ES managed templates, use the templateContext.output to handle multiple outputs.
+ templateContext:
+ outputParentDirectory: $(Build.ArtifactStagingDirectory)
+ outputs:
+ - ${{ if ne(parameters.artifacts.publish, '') }}:
+ - ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}:
+ - output: buildArtifacts
+ displayName: Publish pipeline artifacts
+ PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts'
+ ArtifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }}
+ condition: always()
+ continueOnError: true
+ - ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}:
+ - output: pipelineArtifact
+ targetPath: '$(Build.ArtifactStagingDirectory)/artifacts/log'
+ artifactName: ${{ coalesce(parameters.artifacts.publish.logs.name, 'Logs_Build_$(Agent.Os)_$(_BuildConfig)_Attempt$(System.JobAttempt)') }}
+ displayName: 'Publish logs'
+ continueOnError: true
+ condition: always()
+
+ - ${{ if eq(parameters.enablePublishBuildArtifacts, true) }}:
+ - output: buildArtifacts
+ displayName: Publish Logs
+ PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts/log/$(_BuildConfig)'
+ publishLocation: Container
+ ArtifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)' ) }}
+ continueOnError: true
+ condition: always()
+
+ - ${{ if eq(parameters.enableBuildRetry, 'true') }}:
+ - output: pipelineArtifact
+ targetPath: '$(Build.ArtifactStagingDirectory)/artifacts/eng/common/BuildConfiguration'
+ artifactName: 'BuildConfiguration'
+ displayName: 'Publish build retry configuration'
+ continueOnError: true
+
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.enableSbom, 'true')) }}:
+ - output: pipelineArtifact
+ displayName: Publish SBOM manifest
+ continueOnError: true
+ targetPath: $(Build.ArtifactStagingDirectory)/sbom
+ artifactName: $(ARTIFACT_NAME)
+
+ # add any outputs provided via root yaml
+ - ${{ if ne(parameters.templateContext.outputs, '') }}:
+ - ${{ each output in parameters.templateContext.outputs }}:
+ - ${{ output }}
+
+ # add any remaining templateContext properties
+ ${{ each context in parameters.templateContext }}:
+ ${{ if and(ne(context.key, 'outputParentDirectory'), ne(context.key, 'outputs')) }}:
+ ${{ context.key }}: ${{ context.value }}
+
+ ${{ each parameter in parameters }}:
+ ${{ if and(ne(parameter.key, 'templateContext'), ne(parameter.key, 'is1ESPipeline')) }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates-official/job/onelocbuild.yml b/eng/common/templates-official/job/onelocbuild.yml
index 52b4d05d3f8dd6..0f0c514b912dfc 100644
--- a/eng/common/templates-official/job/onelocbuild.yml
+++ b/eng/common/templates-official/job/onelocbuild.yml
@@ -1,112 +1,7 @@
-parameters:
- # Optional: dependencies of the job
- dependsOn: ''
-
- # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
- pool: ''
-
- CeapexPat: $(dn-bot-ceapex-package-r) # PAT for the loc AzDO instance https://dev.azure.com/ceapex
- GithubPat: $(BotAccount-dotnet-bot-repo-PAT)
-
- SourcesDirectory: $(Build.SourcesDirectory)
- CreatePr: true
- AutoCompletePr: false
- ReusePr: true
- UseLfLineEndings: true
- UseCheckedInLocProjectJson: false
- SkipLocProjectJsonGeneration: false
- LanguageSet: VS_Main_Languages
- LclSource: lclFilesInRepo
- LclPackageId: ''
- RepoType: gitHub
- GitHubOrg: dotnet
- MirrorRepo: ''
- MirrorBranch: main
- condition: ''
- JobNameSuffix: ''
-
jobs:
-- job: OneLocBuild${{ parameters.JobNameSuffix }}
-
- dependsOn: ${{ parameters.dependsOn }}
-
- displayName: OneLocBuild${{ parameters.JobNameSuffix }}
-
- variables:
- - group: OneLocBuildVariables # Contains the CeapexPat and GithubPat
- - name: _GenerateLocProjectArguments
- value: -SourcesDirectory ${{ parameters.SourcesDirectory }}
- -LanguageSet "${{ parameters.LanguageSet }}"
- -CreateNeutralXlfs
- - ${{ if eq(parameters.UseCheckedInLocProjectJson, 'true') }}:
- - name: _GenerateLocProjectArguments
- value: ${{ variables._GenerateLocProjectArguments }} -UseCheckedInLocProjectJson
- - template: /eng/common/templates-official/variables/pool-providers.yml
-
- ${{ if ne(parameters.pool, '') }}:
- pool: ${{ parameters.pool }}
- ${{ if eq(parameters.pool, '') }}:
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: AzurePipelines-EO
- image: 1ESPT-Windows2022
- demands: Cmd
- os: windows
- # If it's not devdiv, it's dnceng
- ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
- name: $(DncEngInternalBuildPool)
- image: 1es-windows-2022
- os: windows
-
- steps:
- - ${{ if ne(parameters.SkipLocProjectJsonGeneration, 'true') }}:
- - task: Powershell@2
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/generate-locproject.ps1
- arguments: $(_GenerateLocProjectArguments)
- displayName: Generate LocProject.json
- condition: ${{ parameters.condition }}
-
- - task: OneLocBuild@2
- displayName: OneLocBuild
- env:
- SYSTEM_ACCESSTOKEN: $(System.AccessToken)
- inputs:
- locProj: eng/Localize/LocProject.json
- outDir: $(Build.ArtifactStagingDirectory)
- lclSource: ${{ parameters.LclSource }}
- lclPackageId: ${{ parameters.LclPackageId }}
- isCreatePrSelected: ${{ parameters.CreatePr }}
- isAutoCompletePrSelected: ${{ parameters.AutoCompletePr }}
- ${{ if eq(parameters.CreatePr, true) }}:
- isUseLfLineEndingsSelected: ${{ parameters.UseLfLineEndings }}
- ${{ if eq(parameters.RepoType, 'gitHub') }}:
- isShouldReusePrSelected: ${{ parameters.ReusePr }}
- packageSourceAuth: patAuth
- patVariable: ${{ parameters.CeapexPat }}
- ${{ if eq(parameters.RepoType, 'gitHub') }}:
- repoType: ${{ parameters.RepoType }}
- gitHubPatVariable: "${{ parameters.GithubPat }}"
- ${{ if ne(parameters.MirrorRepo, '') }}:
- isMirrorRepoSelected: true
- gitHubOrganization: ${{ parameters.GitHubOrg }}
- mirrorRepo: ${{ parameters.MirrorRepo }}
- mirrorBranch: ${{ parameters.MirrorBranch }}
- condition: ${{ parameters.condition }}
-
- - task: 1ES.PublishBuildArtifacts@1
- displayName: Publish Localization Files
- inputs:
- PathtoPublish: '$(Build.ArtifactStagingDirectory)/loc'
- PublishLocation: Container
- ArtifactName: Loc
- condition: ${{ parameters.condition }}
+- template: /eng/common/core-templates/job/onelocbuild.yml
+ parameters:
+ is1ESPipeline: true
- - task: 1ES.PublishBuildArtifacts@1
- displayName: Publish LocProject.json
- inputs:
- PathtoPublish: '$(Build.SourcesDirectory)/eng/Localize/'
- PublishLocation: Container
- ArtifactName: Loc
- condition: ${{ parameters.condition }}
\ No newline at end of file
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates-official/job/publish-build-assets.yml b/eng/common/templates-official/job/publish-build-assets.yml
index 38340d3e38614a..d667a70e8de743 100644
--- a/eng/common/templates-official/job/publish-build-assets.yml
+++ b/eng/common/templates-official/job/publish-build-assets.yml
@@ -1,159 +1,7 @@
-parameters:
- configuration: 'Debug'
-
- # Optional: condition for the job to run
- condition: ''
-
- # Optional: 'true' if future jobs should run even if this job fails
- continueOnError: false
-
- # Optional: dependencies of the job
- dependsOn: ''
-
- # Optional: Include PublishBuildArtifacts task
- enablePublishBuildArtifacts: false
-
- # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
- pool: {}
-
- # Optional: should run as a public build even in the internal project
- # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
- runAsPublic: false
-
- # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing
- publishUsingPipelines: false
-
- # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing
- publishAssetsImmediately: false
-
- artifactsPublishingAdditionalParameters: ''
-
- signingValidationAdditionalParameters: ''
-
jobs:
-- job: Asset_Registry_Publish
-
- dependsOn: ${{ parameters.dependsOn }}
- timeoutInMinutes: 150
-
- ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
- displayName: Publish Assets
- ${{ else }}:
- displayName: Publish to Build Asset Registry
-
- variables:
- - template: /eng/common/templates-official/variables/pool-providers.yml
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - group: Publish-Build-Assets
- - group: AzureDevOps-Artifact-Feeds-Pats
- - name: runCodesignValidationInjection
- value: false
- # unconditional - needed for logs publishing (redactor tool version)
- - template: /eng/common/templates-official/post-build/common-variables.yml
-
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: AzurePipelines-EO
- image: 1ESPT-Windows2022
- demands: Cmd
- os: windows
- # If it's not devdiv, it's dnceng
- ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
- name: NetCore1ESPool-Publishing-Internal
- image: windows.vs2019.amd64
- os: windows
- steps:
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - checkout: self
- fetchDepth: 3
- clean: true
-
- - task: DownloadBuildArtifacts@0
- displayName: Download artifact
- inputs:
- artifactName: AssetManifests
- downloadPath: '$(Build.StagingDirectory)/Download'
- checkDownloadedFiles: true
- condition: ${{ parameters.condition }}
- continueOnError: ${{ parameters.continueOnError }}
-
- - task: NuGetAuthenticate@1
-
- - task: PowerShell@2
- displayName: Publish Build Assets
- inputs:
- filePath: eng\common\sdk-task.ps1
- arguments: -task PublishBuildAssets -restore -msbuildEngine dotnet
- /p:ManifestsPath='$(Build.StagingDirectory)/Download/AssetManifests'
- /p:BuildAssetRegistryToken=$(MaestroAccessToken)
- /p:MaestroApiEndpoint=https://maestro.dot.net
- /p:PublishUsingPipelines=${{ parameters.publishUsingPipelines }}
- /p:OfficialBuildId=$(Build.BuildNumber)
- condition: ${{ parameters.condition }}
- continueOnError: ${{ parameters.continueOnError }}
-
- - task: powershell@2
- displayName: Create ReleaseConfigs Artifact
- inputs:
- targetType: inline
- script: |
- New-Item -Path "$(Build.StagingDirectory)/ReleaseConfigs" -ItemType Directory -Force
- $filePath = "$(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt"
- Add-Content -Path $filePath -Value $(BARBuildId)
- Add-Content -Path $filePath -Value "$(DefaultChannels)"
- Add-Content -Path $filePath -Value $(IsStableBuild)
-
- - task: 1ES.PublishBuildArtifacts@1
- displayName: Publish ReleaseConfigs Artifact
- inputs:
- PathtoPublish: '$(Build.StagingDirectory)/ReleaseConfigs'
- PublishLocation: Container
- ArtifactName: ReleaseConfigs
-
- - task: powershell@2
- displayName: Check if SymbolPublishingExclusionsFile.txt exists
- inputs:
- targetType: inline
- script: |
- $symbolExclusionfile = "$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt"
- if(Test-Path -Path $symbolExclusionfile)
- {
- Write-Host "SymbolExclusionFile exists"
- Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]true"
- }
- else{
- Write-Host "Symbols Exclusion file does not exists"
- Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]false"
- }
-
- - task: 1ES.PublishBuildArtifacts@1
- displayName: Publish SymbolPublishingExclusionsFile Artifact
- condition: eq(variables['SymbolExclusionFile'], 'true')
- inputs:
- PathtoPublish: '$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt'
- PublishLocation: Container
- ArtifactName: ReleaseConfigs
-
- - ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
- - template: /eng/common/templates-official/post-build/setup-maestro-vars.yml
- parameters:
- BARBuildId: ${{ parameters.BARBuildId }}
- PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
-
- - task: PowerShell@2
- displayName: Publish Using Darc
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1
- arguments: -BuildId $(BARBuildId)
- -PublishingInfraVersion 3
- -AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)'
- -MaestroToken '$(MaestroApiAccessToken)'
- -WaitPublishingFinish true
- -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}'
- -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}'
+- template: /eng/common/core-templates/job/publish-build-assets.yml
+ parameters:
+ is1ESPipeline: true
- - ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}:
- - template: /eng/common/templates-official/steps/publish-logs.yml
- parameters:
- JobLabel: 'Publish_Artifacts_Logs'
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates-official/job/source-build.yml b/eng/common/templates-official/job/source-build.yml
index 2180e97a284f84..1a480034b678eb 100644
--- a/eng/common/templates-official/job/source-build.yml
+++ b/eng/common/templates-official/job/source-build.yml
@@ -1,67 +1,7 @@
-parameters:
- # This template adds arcade-powered source-build to CI. The template produces a server job with a
- # default ID 'Source_Build_Complete' to put in a dependency list if necessary.
-
- # Specifies the prefix for source-build jobs added to pipeline. Use this if disambiguation needed.
- jobNamePrefix: 'Source_Build'
-
- # Defines the platform on which to run the job. By default, a linux-x64 machine, suitable for
- # managed-only repositories. This is an object with these properties:
- #
- # name: ''
- # The name of the job. This is included in the job ID.
- # targetRID: ''
- # The name of the target RID to use, instead of the one auto-detected by Arcade.
- # nonPortable: false
- # Enables non-portable mode. This means a more specific RID (e.g. fedora.32-x64 rather than
- # linux-x64), and compiling against distro-provided packages rather than portable ones.
- # skipPublishValidation: false
- # Disables publishing validation. By default, a check is performed to ensure no packages are
- # published by source-build.
- # container: ''
- # A container to use. Runs in docker.
- # pool: {}
- # A pool to use. Runs directly on an agent.
- # buildScript: ''
- # Specifies the build script to invoke to perform the build in the repo. The default
- # './build.sh' should work for typical Arcade repositories, but this is customizable for
- # difficult situations.
- # jobProperties: {}
- # A list of job properties to inject at the top level, for potential extensibility beyond
- # container and pool.
- platform: {}
-
jobs:
-- job: ${{ parameters.jobNamePrefix }}_${{ parameters.platform.name }}
- displayName: Source-Build (${{ parameters.platform.name }})
-
- ${{ each property in parameters.platform.jobProperties }}:
- ${{ property.key }}: ${{ property.value }}
-
- ${{ if ne(parameters.platform.container, '') }}:
- container: ${{ parameters.platform.container }}
-
- ${{ if eq(parameters.platform.pool, '') }}:
- # The default VM host AzDO pool. This should be capable of running Docker containers: almost all
- # source-build builds run in Docker, including the default managed platform.
- # /eng/common/templates-official/variables/pool-providers.yml can't be used here (some customers declare variables already), so duplicate its logic
- pool:
- ${{ if eq(variables['System.TeamProject'], 'public') }}:
- name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore-Svc-Public' ), False, 'NetCore-Public')]
- demands: ImageOverride -equals build.ubuntu.2004.amd64
-
- ${{ if eq(variables['System.TeamProject'], 'internal') }}:
- name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore1ESPool-Svc-Internal'), False, 'NetCore1ESPool-Internal')]
- image: 1es-mariner-2
- os: linux
-
- ${{ if ne(parameters.platform.pool, '') }}:
- pool: ${{ parameters.platform.pool }}
-
- workspace:
- clean: all
+- template: /eng/common/core-templates/job/source-build.yml
+ parameters:
+ is1ESPipeline: true
- steps:
- - template: /eng/common/templates-official/steps/source-build.yml
- parameters:
- platform: ${{ parameters.platform }}
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates-official/job/source-index-stage1.yml b/eng/common/templates-official/job/source-index-stage1.yml
index 53a9ef51fd82d2..6d5ead316f92b5 100644
--- a/eng/common/templates-official/job/source-index-stage1.yml
+++ b/eng/common/templates-official/job/source-index-stage1.yml
@@ -1,67 +1,7 @@
-parameters:
- runAsPublic: false
- sourceIndexPackageVersion: 1.0.1-20240129.2
- sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json
- sourceIndexBuildCommand: powershell -NoLogo -NoProfile -ExecutionPolicy Bypass -Command "eng/common/build.ps1 -restore -build -binarylog -ci"
- preSteps: []
- binlogPath: artifacts/log/Debug/Build.binlog
- condition: ''
- dependsOn: ''
- pool: ''
-
jobs:
-- job: SourceIndexStage1
- dependsOn: ${{ parameters.dependsOn }}
- condition: ${{ parameters.condition }}
- variables:
- - name: SourceIndexPackageVersion
- value: ${{ parameters.sourceIndexPackageVersion }}
- - name: SourceIndexPackageSource
- value: ${{ parameters.sourceIndexPackageSource }}
- - name: BinlogPath
- value: ${{ parameters.binlogPath }}
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - group: source-dot-net stage1 variables
- - template: /eng/common/templates-official/variables/pool-providers.yml
-
- ${{ if ne(parameters.pool, '') }}:
- pool: ${{ parameters.pool }}
- ${{ if eq(parameters.pool, '') }}:
- pool:
- ${{ if eq(variables['System.TeamProject'], 'public') }}:
- name: $(DncEngPublicBuildPool)
- image: windows.vs2022.amd64.open
- ${{ if eq(variables['System.TeamProject'], 'internal') }}:
- name: $(DncEngInternalBuildPool)
- image: windows.vs2022.amd64
-
- steps:
- - ${{ each preStep in parameters.preSteps }}:
- - ${{ preStep }}
-
- - task: UseDotNet@2
- displayName: Use .NET 8 SDK
- inputs:
- packageType: sdk
- version: 8.0.x
- installationPath: $(Agent.TempDirectory)/dotnet
- workingDirectory: $(Agent.TempDirectory)
-
- - script: |
- $(Agent.TempDirectory)/dotnet/dotnet tool install BinLogToSln --version $(SourceIndexPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools
- $(Agent.TempDirectory)/dotnet/dotnet tool install UploadIndexStage1 --version $(SourceIndexPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools
- displayName: Download Tools
- # Set working directory to temp directory so 'dotnet' doesn't try to use global.json and use the repo's sdk.
- workingDirectory: $(Agent.TempDirectory)
-
- - script: ${{ parameters.sourceIndexBuildCommand }}
- displayName: Build Repository
-
- - script: $(Agent.TempDirectory)/.source-index/tools/BinLogToSln -i $(BinlogPath) -r $(Build.SourcesDirectory) -n $(Build.Repository.Name) -o .source-index/stage1output
- displayName: Process Binlog into indexable sln
+- template: /eng/common/core-templates/job/source-index-stage1.yml
+ parameters:
+ is1ESPipeline: true
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - script: $(Agent.TempDirectory)/.source-index/tools/UploadIndexStage1 -i .source-index/stage1output -n $(Build.Repository.Name)
- displayName: Upload stage1 artifacts to source index
- env:
- BLOB_CONTAINER_URL: $(source-dot-net-stage1-blob-container-url)
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates-official/jobs/codeql-build.yml b/eng/common/templates-official/jobs/codeql-build.yml
index b68d3c2f31990f..a726322ecfe016 100644
--- a/eng/common/templates-official/jobs/codeql-build.yml
+++ b/eng/common/templates-official/jobs/codeql-build.yml
@@ -1,31 +1,7 @@
-parameters:
- # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md
- continueOnError: false
- # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
- jobs: []
- # Optional: if specified, restore and use this version of Guardian instead of the default.
- overrideGuardianVersion: ''
-
jobs:
-- template: /eng/common/templates-official/jobs/jobs.yml
+- template: /eng/common/core-templates/jobs/codeql-build.yml
parameters:
- enableMicrobuild: false
- enablePublishBuildArtifacts: false
- enablePublishTestResults: false
- enablePublishBuildAssets: false
- enablePublishUsingPipelines: false
- enableTelemetry: true
+ is1ESPipeline: true
- variables:
- - group: Publish-Build-Assets
- # The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in
- # sync with the packages.config file.
- - name: DefaultGuardianVersion
- value: 0.109.0
- - name: GuardianPackagesConfigFile
- value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config
- - name: GuardianVersion
- value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }}
-
- jobs: ${{ parameters.jobs }}
-
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates-official/jobs/jobs.yml b/eng/common/templates-official/jobs/jobs.yml
index 857a0f8ba43e84..007deddaea0f53 100644
--- a/eng/common/templates-official/jobs/jobs.yml
+++ b/eng/common/templates-official/jobs/jobs.yml
@@ -1,97 +1,7 @@
-parameters:
- # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md
- continueOnError: false
-
- # Optional: Include PublishBuildArtifacts task
- enablePublishBuildArtifacts: false
-
- # Optional: Enable publishing using release pipelines
- enablePublishUsingPipelines: false
-
- # Optional: Enable running the source-build jobs to build repo from source
- enableSourceBuild: false
-
- # Optional: Parameters for source-build template.
- # See /eng/common/templates-official/jobs/source-build.yml for options
- sourceBuildParameters: []
-
- graphFileGeneration:
- # Optional: Enable generating the graph files at the end of the build
- enabled: false
- # Optional: Include toolset dependencies in the generated graph files
- includeToolset: false
-
- # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
- jobs: []
-
- # Optional: Override automatically derived dependsOn value for "publish build assets" job
- publishBuildAssetsDependsOn: ''
-
- # Optional: Publish the assets as soon as the publish to BAR stage is complete, rather doing so in a separate stage.
- publishAssetsImmediately: false
-
- # Optional: If using publishAssetsImmediately and additional parameters are needed, can be used to send along additional parameters (normally sent to post-build.yml)
- artifactsPublishingAdditionalParameters: ''
- signingValidationAdditionalParameters: ''
-
- # Optional: should run as a public build even in the internal project
- # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
- runAsPublic: false
-
- enableSourceIndex: false
- sourceIndexParams: {}
-
-# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
-# and some (Microbuild) should only be applied to non-PR cases for internal builds.
-
jobs:
-- ${{ each job in parameters.jobs }}:
- - template: ../job/job.yml
- parameters:
- # pass along parameters
- ${{ each parameter in parameters }}:
- ${{ if ne(parameter.key, 'jobs') }}:
- ${{ parameter.key }}: ${{ parameter.value }}
-
- # pass along job properties
- ${{ each property in job }}:
- ${{ if ne(property.key, 'job') }}:
- ${{ property.key }}: ${{ property.value }}
-
- name: ${{ job.job }}
-
-- ${{ if eq(parameters.enableSourceBuild, true) }}:
- - template: /eng/common/templates-official/jobs/source-build.yml
- parameters:
- allCompletedJobId: Source_Build_Complete
- ${{ each parameter in parameters.sourceBuildParameters }}:
- ${{ parameter.key }}: ${{ parameter.value }}
-
-- ${{ if eq(parameters.enableSourceIndex, 'true') }}:
- - template: ../job/source-index-stage1.yml
- parameters:
- runAsPublic: ${{ parameters.runAsPublic }}
- ${{ each parameter in parameters.sourceIndexParams }}:
- ${{ parameter.key }}: ${{ parameter.value }}
-
-- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - ${{ if or(eq(parameters.enablePublishBuildAssets, true), eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, '')) }}:
- - template: ../job/publish-build-assets.yml
- parameters:
- continueOnError: ${{ parameters.continueOnError }}
- dependsOn:
- - ${{ if ne(parameters.publishBuildAssetsDependsOn, '') }}:
- - ${{ each job in parameters.publishBuildAssetsDependsOn }}:
- - ${{ job.job }}
- - ${{ if eq(parameters.publishBuildAssetsDependsOn, '') }}:
- - ${{ each job in parameters.jobs }}:
- - ${{ job.job }}
- - ${{ if eq(parameters.enableSourceBuild, true) }}:
- - Source_Build_Complete
+- template: /eng/common/core-templates/jobs/jobs.yml
+ parameters:
+ is1ESPipeline: true
- runAsPublic: ${{ parameters.runAsPublic }}
- publishUsingPipelines: ${{ parameters.enablePublishUsingPipelines }}
- publishAssetsImmediately: ${{ parameters.publishAssetsImmediately }}
- enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }}
- artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
- signingValidationAdditionalParameters: ${{ parameters.signingValidationAdditionalParameters }}
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates-official/jobs/source-build.yml b/eng/common/templates-official/jobs/source-build.yml
index 2076f4e25b43c6..483e7b611f346b 100644
--- a/eng/common/templates-official/jobs/source-build.yml
+++ b/eng/common/templates-official/jobs/source-build.yml
@@ -1,46 +1,7 @@
-parameters:
- # This template adds arcade-powered source-build to CI. A job is created for each platform, as
- # well as an optional server job that completes when all platform jobs complete.
-
- # The name of the "join" job for all source-build platforms. If set to empty string, the job is
- # not included. Existing repo pipelines can use this job depend on all source-build jobs
- # completing without maintaining a separate list of every single job ID: just depend on this one
- # server job. By default, not included. Recommended name if used: 'Source_Build_Complete'.
- allCompletedJobId: ''
-
- # See /eng/common/templates-official/job/source-build.yml
- jobNamePrefix: 'Source_Build'
-
- # This is the default platform provided by Arcade, intended for use by a managed-only repo.
- defaultManagedPlatform:
- name: 'Managed'
- container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream9'
-
- # Defines the platforms on which to run build jobs. One job is created for each platform, and the
- # object in this array is sent to the job template as 'platform'. If no platforms are specified,
- # one job runs on 'defaultManagedPlatform'.
- platforms: []
-
jobs:
+- template: /eng/common/core-templates/jobs/source-build.yml
+ parameters:
+ is1ESPipeline: true
-- ${{ if ne(parameters.allCompletedJobId, '') }}:
- - job: ${{ parameters.allCompletedJobId }}
- displayName: Source-Build Complete
- pool: server
- dependsOn:
- - ${{ each platform in parameters.platforms }}:
- - ${{ parameters.jobNamePrefix }}_${{ platform.name }}
- - ${{ if eq(length(parameters.platforms), 0) }}:
- - ${{ parameters.jobNamePrefix }}_${{ parameters.defaultManagedPlatform.name }}
-
-- ${{ each platform in parameters.platforms }}:
- - template: /eng/common/templates-official/job/source-build.yml
- parameters:
- jobNamePrefix: ${{ parameters.jobNamePrefix }}
- platform: ${{ platform }}
-
-- ${{ if eq(length(parameters.platforms), 0) }}:
- - template: /eng/common/templates-official/job/source-build.yml
- parameters:
- jobNamePrefix: ${{ parameters.jobNamePrefix }}
- platform: ${{ parameters.defaultManagedPlatform }}
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
\ No newline at end of file
diff --git a/eng/common/templates-official/post-build/common-variables.yml b/eng/common/templates-official/post-build/common-variables.yml
index b9ede10bf099ae..c32fc49233f8fd 100644
--- a/eng/common/templates-official/post-build/common-variables.yml
+++ b/eng/common/templates-official/post-build/common-variables.yml
@@ -1,24 +1,8 @@
variables:
- - group: Publish-Build-Assets
+- template: /eng/common/core-templates/post-build/common-variables.yml
+ parameters:
+ # Specifies whether to use 1ES
+ is1ESPipeline: true
- # Whether the build is internal or not
- - name: IsInternalBuild
- value: ${{ and(ne(variables['System.TeamProject'], 'public'), contains(variables['Build.SourceBranch'], 'internal')) }}
-
- # Default Maestro++ API Endpoint and API Version
- - name: MaestroApiEndPoint
- value: "https://maestro.dot.net"
- - name: MaestroApiAccessToken
- value: $(MaestroAccessToken)
- - name: MaestroApiVersion
- value: "2020-02-20"
-
- - name: SourceLinkCLIVersion
- value: 3.0.0
- - name: SymbolToolVersion
- value: 1.0.1
- - name: BinlogToolVersion
- value: 1.0.11
-
- - name: runCodesignValidationInjection
- value: false
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
\ No newline at end of file
diff --git a/eng/common/templates-official/post-build/post-build.yml b/eng/common/templates-official/post-build/post-build.yml
index da1f40958b450d..2364c0fd4a527e 100644
--- a/eng/common/templates-official/post-build/post-build.yml
+++ b/eng/common/templates-official/post-build/post-build.yml
@@ -1,285 +1,8 @@
-parameters:
- # Which publishing infra should be used. THIS SHOULD MATCH THE VERSION ON THE BUILD MANIFEST.
- # Publishing V1 is no longer supported
- # Publishing V2 is no longer supported
- # Publishing V3 is the default
- - name: publishingInfraVersion
- displayName: Which version of publishing should be used to promote the build definition?
- type: number
- default: 3
- values:
- - 3
-
- - name: BARBuildId
- displayName: BAR Build Id
- type: number
- default: 0
-
- - name: PromoteToChannelIds
- displayName: Channel to promote BARBuildId to
- type: string
- default: ''
-
- - name: enableSourceLinkValidation
- displayName: Enable SourceLink validation
- type: boolean
- default: false
-
- - name: enableSigningValidation
- displayName: Enable signing validation
- type: boolean
- default: true
-
- - name: enableSymbolValidation
- displayName: Enable symbol validation
- type: boolean
- default: false
-
- - name: enableNugetValidation
- displayName: Enable NuGet validation
- type: boolean
- default: true
-
- - name: publishInstallersAndChecksums
- displayName: Publish installers and checksums
- type: boolean
- default: true
-
- - name: SDLValidationParameters
- type: object
- default:
- enable: false
- publishGdn: false
- continueOnError: false
- params: ''
- artifactNames: ''
- downloadArtifacts: true
-
- # These parameters let the user customize the call to sdk-task.ps1 for publishing
- # symbols & general artifacts as well as for signing validation
- - name: symbolPublishingAdditionalParameters
- displayName: Symbol publishing additional parameters
- type: string
- default: ''
-
- - name: artifactsPublishingAdditionalParameters
- displayName: Artifact publishing additional parameters
- type: string
- default: ''
-
- - name: signingValidationAdditionalParameters
- displayName: Signing validation additional parameters
- type: string
- default: ''
-
- # Which stages should finish execution before post-build stages start
- - name: validateDependsOn
- type: object
- default:
- - build
-
- - name: publishDependsOn
- type: object
- default:
- - Validate
-
- # Optional: Call asset publishing rather than running in a separate stage
- - name: publishAssetsImmediately
- type: boolean
- default: false
-
stages:
-- ${{ if or(eq( parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}:
- - stage: Validate
- dependsOn: ${{ parameters.validateDependsOn }}
- displayName: Validate Build Assets
- variables:
- - template: common-variables.yml
- - template: /eng/common/templates-official/variables/pool-providers.yml
- jobs:
- - job:
- displayName: NuGet Validation
- condition: and(succeededOrFailed(), eq( ${{ parameters.enableNugetValidation }}, 'true'))
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: AzurePipelines-EO
- image: 1ESPT-Windows2022
- demands: Cmd
- os: windows
- # If it's not devdiv, it's dnceng
- ${{ else }}:
- name: $(DncEngInternalBuildPool)
- image: 1es-windows-2022
- os: windows
-
- steps:
- - template: setup-maestro-vars.yml
- parameters:
- BARBuildId: ${{ parameters.BARBuildId }}
- PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
-
- - task: DownloadBuildArtifacts@0
- displayName: Download Package Artifacts
- inputs:
- buildType: specific
- buildVersionToDownload: specific
- project: $(AzDOProjectName)
- pipeline: $(AzDOPipelineId)
- buildId: $(AzDOBuildId)
- artifactName: PackageArtifacts
- checkDownloadedFiles: true
-
- - task: PowerShell@2
- displayName: Validate
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/nuget-validation.ps1
- arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/
- -ToolDestinationPath $(Agent.BuildDirectory)/Extract/
-
- - job:
- displayName: Signing Validation
- condition: and( eq( ${{ parameters.enableSigningValidation }}, 'true'), ne( variables['PostBuildSign'], 'true'))
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: AzurePipelines-EO
- image: 1ESPT-Windows2022
- demands: Cmd
- os: windows
- # If it's not devdiv, it's dnceng
- ${{ else }}:
- name: $(DncEngInternalBuildPool)
- image: 1es-windows-2022
- os: windows
- steps:
- - template: setup-maestro-vars.yml
- parameters:
- BARBuildId: ${{ parameters.BARBuildId }}
- PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
-
- - task: DownloadBuildArtifacts@0
- displayName: Download Package Artifacts
- inputs:
- buildType: specific
- buildVersionToDownload: specific
- project: $(AzDOProjectName)
- pipeline: $(AzDOPipelineId)
- buildId: $(AzDOBuildId)
- artifactName: PackageArtifacts
- checkDownloadedFiles: true
- itemPattern: |
- **
- !**/Microsoft.SourceBuild.Intermediate.*.nupkg
-
- # This is necessary whenever we want to publish/restore to an AzDO private feed
- # Since sdk-task.ps1 tries to restore packages we need to do this authentication here
- # otherwise it'll complain about accessing a private feed.
- - task: NuGetAuthenticate@1
- displayName: 'Authenticate to AzDO Feeds'
-
- # Signing validation will optionally work with the buildmanifest file which is downloaded from
- # Azure DevOps above.
- - task: PowerShell@2
- displayName: Validate
- inputs:
- filePath: eng\common\sdk-task.ps1
- arguments: -task SigningValidation -restore -msbuildEngine vs
- /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts'
- /p:SignCheckExclusionsFile='$(Build.SourcesDirectory)/eng/SignCheckExclusionsFile.txt'
- ${{ parameters.signingValidationAdditionalParameters }}
-
- - template: ../steps/publish-logs.yml
- parameters:
- StageLabel: 'Validation'
- JobLabel: 'Signing'
- BinlogToolVersion: $(BinlogToolVersion)
-
- - job:
- displayName: SourceLink Validation
- condition: eq( ${{ parameters.enableSourceLinkValidation }}, 'true')
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: AzurePipelines-EO
- image: 1ESPT-Windows2022
- demands: Cmd
- os: windows
- # If it's not devdiv, it's dnceng
- ${{ else }}:
- name: $(DncEngInternalBuildPool)
- image: 1es-windows-2022
- os: windows
- steps:
- - template: setup-maestro-vars.yml
- parameters:
- BARBuildId: ${{ parameters.BARBuildId }}
- PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
-
- - task: DownloadBuildArtifacts@0
- displayName: Download Blob Artifacts
- inputs:
- buildType: specific
- buildVersionToDownload: specific
- project: $(AzDOProjectName)
- pipeline: $(AzDOPipelineId)
- buildId: $(AzDOBuildId)
- artifactName: BlobArtifacts
- checkDownloadedFiles: true
-
- - task: PowerShell@2
- displayName: Validate
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/sourcelink-validation.ps1
- arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/
- -ExtractPath $(Agent.BuildDirectory)/Extract/
- -GHRepoName $(Build.Repository.Name)
- -GHCommit $(Build.SourceVersion)
- -SourcelinkCliVersion $(SourceLinkCLIVersion)
- continueOnError: true
-
-- ${{ if ne(parameters.publishAssetsImmediately, 'true') }}:
- - stage: publish_using_darc
- ${{ if or(eq(parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}:
- dependsOn: ${{ parameters.publishDependsOn }}
- ${{ else }}:
- dependsOn: ${{ parameters.validateDependsOn }}
- displayName: Publish using Darc
- variables:
- - template: common-variables.yml
- - template: /eng/common/templates-official/variables/pool-providers.yml
- jobs:
- - job:
- displayName: Publish Using Darc
- timeoutInMinutes: 120
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: AzurePipelines-EO
- image: 1ESPT-Windows2022
- demands: Cmd
- os: windows
- # If it's not devdiv, it's dnceng
- ${{ else }}:
- name: NetCore1ESPool-Publishing-Internal
- image: windows.vs2019.amd64
- os: windows
- steps:
- - template: setup-maestro-vars.yml
- parameters:
- BARBuildId: ${{ parameters.BARBuildId }}
- PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
-
- - task: NuGetAuthenticate@1
+- template: /eng/common/core-templates/post-build/post-build.yml
+ parameters:
+ # Specifies whether to use 1ES
+ is1ESPipeline: true
- - task: PowerShell@2
- displayName: Publish Using Darc
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1
- arguments: -BuildId $(BARBuildId)
- -PublishingInfraVersion ${{ parameters.publishingInfraVersion }}
- -AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)'
- -MaestroToken '$(MaestroApiAccessToken)'
- -WaitPublishingFinish true
- -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}'
- -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}'
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates-official/post-build/setup-maestro-vars.yml b/eng/common/templates-official/post-build/setup-maestro-vars.yml
index 0c87f149a4ad77..024397d8786452 100644
--- a/eng/common/templates-official/post-build/setup-maestro-vars.yml
+++ b/eng/common/templates-official/post-build/setup-maestro-vars.yml
@@ -1,70 +1,8 @@
-parameters:
- BARBuildId: ''
- PromoteToChannelIds: ''
-
steps:
- - ${{ if eq(coalesce(parameters.PromoteToChannelIds, 0), 0) }}:
- - task: DownloadBuildArtifacts@0
- displayName: Download Release Configs
- inputs:
- buildType: current
- artifactName: ReleaseConfigs
- checkDownloadedFiles: true
-
- - task: PowerShell@2
- name: setReleaseVars
- displayName: Set Release Configs Vars
- inputs:
- targetType: inline
- pwsh: true
- script: |
- try {
- if (!$Env:PromoteToMaestroChannels -or $Env:PromoteToMaestroChannels.Trim() -eq '') {
- $Content = Get-Content $(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt
-
- $BarId = $Content | Select -Index 0
- $Channels = $Content | Select -Index 1
- $IsStableBuild = $Content | Select -Index 2
-
- $AzureDevOpsProject = $Env:System_TeamProject
- $AzureDevOpsBuildDefinitionId = $Env:System_DefinitionId
- $AzureDevOpsBuildId = $Env:Build_BuildId
- }
- else {
- $buildApiEndpoint = "${Env:MaestroApiEndPoint}/api/builds/${Env:BARBuildId}?api-version=${Env:MaestroApiVersion}"
-
- $apiHeaders = New-Object 'System.Collections.Generic.Dictionary[[String],[String]]'
- $apiHeaders.Add('Accept', 'application/json')
- $apiHeaders.Add('Authorization',"Bearer ${Env:MAESTRO_API_TOKEN}")
-
- $buildInfo = try { Invoke-WebRequest -Method Get -Uri $buildApiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" }
-
- $BarId = $Env:BARBuildId
- $Channels = $Env:PromoteToMaestroChannels -split ","
- $Channels = $Channels -join "]["
- $Channels = "[$Channels]"
-
- $IsStableBuild = $buildInfo.stable
- $AzureDevOpsProject = $buildInfo.azureDevOpsProject
- $AzureDevOpsBuildDefinitionId = $buildInfo.azureDevOpsBuildDefinitionId
- $AzureDevOpsBuildId = $buildInfo.azureDevOpsBuildId
- }
-
- Write-Host "##vso[task.setvariable variable=BARBuildId]$BarId"
- Write-Host "##vso[task.setvariable variable=TargetChannels]$Channels"
- Write-Host "##vso[task.setvariable variable=IsStableBuild]$IsStableBuild"
+- template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
+ parameters:
+ # Specifies whether to use 1ES
+ is1ESPipeline: true
- Write-Host "##vso[task.setvariable variable=AzDOProjectName]$AzureDevOpsProject"
- Write-Host "##vso[task.setvariable variable=AzDOPipelineId]$AzureDevOpsBuildDefinitionId"
- Write-Host "##vso[task.setvariable variable=AzDOBuildId]$AzureDevOpsBuildId"
- }
- catch {
- Write-Host $_
- Write-Host $_.Exception
- Write-Host $_.ScriptStackTrace
- exit 1
- }
- env:
- MAESTRO_API_TOKEN: $(MaestroApiAccessToken)
- BARBuildId: ${{ parameters.BARBuildId }}
- PromoteToMaestroChannels: ${{ parameters.PromoteToChannelIds }}
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
\ No newline at end of file
diff --git a/eng/common/templates-official/steps/add-build-to-channel.yml b/eng/common/templates-official/steps/add-build-to-channel.yml
index f67a210d62f3e5..543dea8c6969a6 100644
--- a/eng/common/templates-official/steps/add-build-to-channel.yml
+++ b/eng/common/templates-official/steps/add-build-to-channel.yml
@@ -1,13 +1,7 @@
-parameters:
- ChannelId: 0
-
steps:
-- task: PowerShell@2
- displayName: Add Build to Channel
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/add-build-to-channel.ps1
- arguments: -BuildId $(BARBuildId)
- -ChannelId ${{ parameters.ChannelId }}
- -MaestroApiAccessToken $(MaestroApiAccessToken)
- -MaestroApiEndPoint $(MaestroApiEndPoint)
- -MaestroApiVersion $(MaestroApiVersion)
+- template: /eng/common/core-templates/steps/add-build-to-channel.yml
+ parameters:
+ is1ESPipeline: true
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates-official/steps/component-governance.yml b/eng/common/templates-official/steps/component-governance.yml
index 0ecec47b0c9177..30bb3985ca2bf4 100644
--- a/eng/common/templates-official/steps/component-governance.yml
+++ b/eng/common/templates-official/steps/component-governance.yml
@@ -1,13 +1,7 @@
-parameters:
- disableComponentGovernance: false
- componentGovernanceIgnoreDirectories: ''
-
steps:
-- ${{ if eq(parameters.disableComponentGovernance, 'true') }}:
- - script: "echo ##vso[task.setvariable variable=skipComponentGovernanceDetection]true"
- displayName: Set skipComponentGovernanceDetection variable
-- ${{ if ne(parameters.disableComponentGovernance, 'true') }}:
- - task: ComponentGovernanceComponentDetection@0
- continueOnError: true
- inputs:
- ignoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
\ No newline at end of file
+- template: /eng/common/core-templates/steps/component-governance.yml
+ parameters:
+ is1ESPipeline: true
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates-official/steps/generate-sbom.yml b/eng/common/templates-official/steps/generate-sbom.yml
index 488b560e8ba4eb..9a89a4706d94e4 100644
--- a/eng/common/templates-official/steps/generate-sbom.yml
+++ b/eng/common/templates-official/steps/generate-sbom.yml
@@ -1,48 +1,7 @@
-# BuildDropPath - The root folder of the drop directory for which the manifest file will be generated.
-# PackageName - The name of the package this SBOM represents.
-# PackageVersion - The version of the package this SBOM represents.
-# ManifestDirPath - The path of the directory where the generated manifest files will be placed
-# IgnoreDirectories - Directories to ignore for SBOM generation. This will be passed through to the CG component detector.
-
-parameters:
- PackageVersion: 7.0.0
- BuildDropPath: '$(Build.SourcesDirectory)/artifacts'
- PackageName: '.NET'
- ManifestDirPath: $(Build.ArtifactStagingDirectory)/sbom
- IgnoreDirectories: ''
- sbomContinueOnError: true
-
steps:
-- task: PowerShell@2
- displayName: Prep for SBOM generation in (Non-linux)
- condition: or(eq(variables['Agent.Os'], 'Windows_NT'), eq(variables['Agent.Os'], 'Darwin'))
- inputs:
- filePath: ./eng/common/generate-sbom-prep.ps1
- arguments: ${{parameters.manifestDirPath}}
-
-# Chmodding is a workaround for https://github.com/dotnet/arcade/issues/8461
-- script: |
- chmod +x ./eng/common/generate-sbom-prep.sh
- ./eng/common/generate-sbom-prep.sh ${{parameters.manifestDirPath}}
- displayName: Prep for SBOM generation in (Linux)
- condition: eq(variables['Agent.Os'], 'Linux')
- continueOnError: ${{ parameters.sbomContinueOnError }}
-
-- task: AzureArtifacts.manifest-generator-task.manifest-generator-task.ManifestGeneratorTask@0
- displayName: 'Generate SBOM manifest'
- continueOnError: ${{ parameters.sbomContinueOnError }}
- inputs:
- PackageName: ${{ parameters.packageName }}
- BuildDropPath: ${{ parameters.buildDropPath }}
- PackageVersion: ${{ parameters.packageVersion }}
- ManifestDirPath: ${{ parameters.manifestDirPath }}
- ${{ if ne(parameters.IgnoreDirectories, '') }}:
- AdditionalComponentDetectorArgs: '--IgnoreDirectories ${{ parameters.IgnoreDirectories }}'
-
-- task: 1ES.PublishPipelineArtifact@1
- displayName: Publish SBOM manifest
- continueOnError: ${{parameters.sbomContinueOnError}}
- inputs:
- targetPath: '${{parameters.manifestDirPath}}'
- artifactName: $(ARTIFACT_NAME)
+- template: /eng/common/core-templates/steps/generate-sbom.yml
+ parameters:
+ is1ESPipeline: true
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates-official/steps/publish-build-artifacts.yml b/eng/common/templates-official/steps/publish-build-artifacts.yml
new file mode 100644
index 00000000000000..100a3fc98493cd
--- /dev/null
+++ b/eng/common/templates-official/steps/publish-build-artifacts.yml
@@ -0,0 +1,41 @@
+parameters:
+- name: displayName
+ type: string
+ default: 'Publish to Build Artifact'
+
+- name: condition
+ type: string
+ default: succeeded()
+
+- name: artifactName
+ type: string
+
+- name: pathToPublish
+ type: string
+
+- name: continueOnError
+ type: boolean
+ default: false
+
+- name: publishLocation
+ type: string
+ default: 'Container'
+
+- name: is1ESPipeline
+ type: boolean
+ default: true
+
+steps:
+- ${{ if ne(parameters.is1ESPipeline, true) }}:
+ - 'eng/common/templates-official cannot be referenced from a non-1ES managed template': error
+- task: 1ES.PublishBuildArtifacts@1
+ displayName: ${{ parameters.displayName }}
+ condition: ${{ parameters.condition }}
+ ${{ if parameters.continueOnError }}:
+ continueOnError: ${{ parameters.continueOnError }}
+ inputs:
+ PublishLocation: ${{ parameters.publishLocation }}
+ PathtoPublish: ${{ parameters.pathToPublish }}
+ ${{ if parameters.artifactName }}:
+ ArtifactName: ${{ parameters.artifactName }}
+
diff --git a/eng/common/templates-official/steps/publish-logs.yml b/eng/common/templates-official/steps/publish-logs.yml
index 84b2f559c56e40..579fd531e94c38 100644
--- a/eng/common/templates-official/steps/publish-logs.yml
+++ b/eng/common/templates-official/steps/publish-logs.yml
@@ -1,49 +1,7 @@
-parameters:
- StageLabel: ''
- JobLabel: ''
- CustomSensitiveDataList: ''
- # A default - in case value from eng/common/templates-official/post-build/common-variables.yml is not passed
- BinlogToolVersion: '1.0.11'
-
steps:
-- task: Powershell@2
- displayName: Prepare Binlogs to Upload
- inputs:
- targetType: inline
- script: |
- New-Item -ItemType Directory $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
- Move-Item -Path $(Build.SourcesDirectory)/artifacts/log/Debug/* $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
- continueOnError: true
- condition: always()
-
-- task: PowerShell@2
- displayName: Redact Logs
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/redact-logs.ps1
- # For now this needs to have explicit list of all sensitive data. Taken from eng/publishing/v3/publish.yml
- # Sensitive data can as well be added to $(Build.SourcesDirectory)/eng/BinlogSecretsRedactionFile.txt'
- # If the file exists - sensitive data for redaction will be sourced from it
- # (single entry per line, lines starting with '# ' are considered comments and skipped)
- arguments: -InputPath '$(Build.SourcesDirectory)/PostBuildLogs'
- -BinlogToolVersion ${{parameters.BinlogToolVersion}}
- -TokensFilePath '$(Build.SourcesDirectory)/eng/BinlogSecretsRedactionFile.txt'
- '$(publishing-dnceng-devdiv-code-r-build-re)'
- '$(MaestroAccessToken)'
- '$(dn-bot-all-orgs-artifact-feeds-rw)'
- '$(akams-client-id)'
- '$(akams-client-secret)'
- '$(microsoft-symbol-server-pat)'
- '$(symweb-symbol-server-pat)'
- '$(dn-bot-all-orgs-build-rw-code-rw)'
- ${{parameters.CustomSensitiveDataList}}
- continueOnError: true
- condition: always()
-
-- task: 1ES.PublishBuildArtifacts@1
- displayName: Publish Logs
- inputs:
- PathtoPublish: '$(Build.SourcesDirectory)/PostBuildLogs'
- PublishLocation: Container
- ArtifactName: PostBuildLogs
- continueOnError: true
- condition: always()
+- template: /eng/common/core-templates/steps/publish-logs.yml
+ parameters:
+ is1ESPipeline: true
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates-official/steps/publish-pipeline-artifacts.yml b/eng/common/templates-official/steps/publish-pipeline-artifacts.yml
new file mode 100644
index 00000000000000..d71eb0c7439862
--- /dev/null
+++ b/eng/common/templates-official/steps/publish-pipeline-artifacts.yml
@@ -0,0 +1,26 @@
+parameters:
+- name: is1ESPipeline
+ type: boolean
+ default: true
+
+- name: args
+ type: object
+ default: {}
+
+steps:
+- ${{ if ne(parameters.is1ESPipeline, true) }}:
+ - 'eng/common/templates-official cannot be referenced from a non-1ES managed template': error
+- task: 1ES.PublishPipelineArtifact@1
+ displayName: ${{ coalesce(parameters.args.displayName, 'Publish to Build Artifact') }}
+ ${{ if parameters.args.condition }}:
+ condition: ${{ parameters.args.condition }}
+ ${{ else }}:
+ condition: succeeded()
+ ${{ if parameters.args.continueOnError }}:
+ continueOnError: ${{ parameters.args.continueOnError }}
+ inputs:
+ targetPath: ${{ parameters.args.targetPath }}
+ ${{ if parameters.args.artifactName }}:
+ artifactName: ${{ parameters.args.artifactName }}
+ ${{ if parameters.args.properties }}:
+ properties: ${{ parameters.args.properties }}
\ No newline at end of file
diff --git a/eng/common/templates-official/steps/retain-build.yml b/eng/common/templates-official/steps/retain-build.yml
index 83d97a26a01ff9..5594551508a3cf 100644
--- a/eng/common/templates-official/steps/retain-build.yml
+++ b/eng/common/templates-official/steps/retain-build.yml
@@ -1,28 +1,7 @@
-parameters:
- # Optional azure devops PAT with build execute permissions for the build's organization,
- # only needed if the build that should be retained ran on a different organization than
- # the pipeline where this template is executing from
- Token: ''
- # Optional BuildId to retain, defaults to the current running build
- BuildId: ''
- # Azure devops Organization URI for the build in the https://dev.azure.com/ format.
- # Defaults to the organization the current pipeline is running on
- AzdoOrgUri: '$(System.CollectionUri)'
- # Azure devops project for the build. Defaults to the project the current pipeline is running on
- AzdoProject: '$(System.TeamProject)'
-
steps:
- - task: powershell@2
- inputs:
- targetType: 'filePath'
- filePath: eng/common/retain-build.ps1
- pwsh: true
- arguments: >
- -AzdoOrgUri: ${{parameters.AzdoOrgUri}}
- -AzdoProject ${{parameters.AzdoProject}}
- -Token ${{coalesce(parameters.Token, '$env:SYSTEM_ACCESSTOKEN') }}
- -BuildId ${{coalesce(parameters.BuildId, '$env:BUILD_ID')}}
- displayName: Enable permanent build retention
- env:
- SYSTEM_ACCESSTOKEN: $(System.AccessToken)
- BUILD_ID: $(Build.BuildId)
\ No newline at end of file
+- template: /eng/common/core-templates/steps/retain-build.yml
+ parameters:
+ is1ESPipeline: true
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates-official/steps/send-to-helix.yml b/eng/common/templates-official/steps/send-to-helix.yml
index 68fa739c4ab215..6500f21bf845ce 100644
--- a/eng/common/templates-official/steps/send-to-helix.yml
+++ b/eng/common/templates-official/steps/send-to-helix.yml
@@ -1,93 +1,7 @@
-# Please remember to update the documentation if you make changes to these parameters!
-parameters:
- HelixSource: 'pr/default' # required -- sources must start with pr/, official/, prodcon/, or agent/
- HelixType: 'tests/default/' # required -- Helix telemetry which identifies what type of data this is; should include "test" for clarity and must end in '/'
- HelixBuild: $(Build.BuildNumber) # required -- the build number Helix will use to identify this -- automatically set to the AzDO build number
- HelixTargetQueues: '' # required -- semicolon-delimited list of Helix queues to test on; see https://helix.dot.net/ for a list of queues
- HelixAccessToken: '' # required -- access token to make Helix API requests; should be provided by the appropriate variable group
- HelixProjectPath: 'eng/common/helixpublish.proj' # optional -- path to the project file to build relative to BUILD_SOURCESDIRECTORY
- HelixProjectArguments: '' # optional -- arguments passed to the build command
- HelixConfiguration: '' # optional -- additional property attached to a job
- HelixPreCommands: '' # optional -- commands to run before Helix work item execution
- HelixPostCommands: '' # optional -- commands to run after Helix work item execution
- WorkItemDirectory: '' # optional -- a payload directory to zip up and send to Helix; requires WorkItemCommand; incompatible with XUnitProjects
- WorkItemCommand: '' # optional -- a command to execute on the payload; requires WorkItemDirectory; incompatible with XUnitProjects
- WorkItemTimeout: '' # optional -- a timeout in TimeSpan.Parse-ready value (e.g. 00:02:00) for the work item command; requires WorkItemDirectory; incompatible with XUnitProjects
- CorrelationPayloadDirectory: '' # optional -- a directory to zip up and send to Helix as a correlation payload
- XUnitProjects: '' # optional -- semicolon-delimited list of XUnitProjects to parse and send to Helix; requires XUnitRuntimeTargetFramework, XUnitPublishTargetFramework, XUnitRunnerVersion, and IncludeDotNetCli=true
- XUnitWorkItemTimeout: '' # optional -- the workitem timeout in seconds for all workitems created from the xUnit projects specified by XUnitProjects
- XUnitPublishTargetFramework: '' # optional -- framework to use to publish your xUnit projects
- XUnitRuntimeTargetFramework: '' # optional -- framework to use for the xUnit console runner
- XUnitRunnerVersion: '' # optional -- version of the xUnit nuget package you wish to use on Helix; required for XUnitProjects
- IncludeDotNetCli: false # optional -- true will download a version of the .NET CLI onto the Helix machine as a correlation payload; requires DotNetCliPackageType and DotNetCliVersion
- DotNetCliPackageType: '' # optional -- either 'sdk', 'runtime' or 'aspnetcore-runtime'; determines whether the sdk or runtime will be sent to Helix; see https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json
- DotNetCliVersion: '' # optional -- version of the CLI to send to Helix; based on this: https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json
- WaitForWorkItemCompletion: true # optional -- true will make the task wait until work items have been completed and fail the build if work items fail. False is "fire and forget."
- IsExternal: false # [DEPRECATED] -- doesn't do anything, jobs are external if HelixAccessToken is empty and Creator is set
- HelixBaseUri: 'https://helix.dot.net/' # optional -- sets the Helix API base URI (allows targeting https://helix.int-dot.net )
- Creator: '' # optional -- if the build is external, use this to specify who is sending the job
- DisplayNamePrefix: 'Run Tests' # optional -- rename the beginning of the displayName of the steps in AzDO
- condition: succeeded() # optional -- condition for step to execute; defaults to succeeded()
- continueOnError: false # optional -- determines whether to continue the build if the step errors; defaults to false
-
steps:
- - powershell: 'powershell "$env:BUILD_SOURCESDIRECTORY\eng\common\msbuild.ps1 $env:BUILD_SOURCESDIRECTORY/${{ parameters.HelixProjectPath }} /restore /p:TreatWarningsAsErrors=false ${{ parameters.HelixProjectArguments }} /t:Test /bl:$env:BUILD_SOURCESDIRECTORY\artifacts\log\$env:BuildConfig\SendToHelix.binlog"'
- displayName: ${{ parameters.DisplayNamePrefix }} (Windows)
- env:
- BuildConfig: $(_BuildConfig)
- HelixSource: ${{ parameters.HelixSource }}
- HelixType: ${{ parameters.HelixType }}
- HelixBuild: ${{ parameters.HelixBuild }}
- HelixConfiguration: ${{ parameters.HelixConfiguration }}
- HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
- HelixAccessToken: ${{ parameters.HelixAccessToken }}
- HelixPreCommands: ${{ parameters.HelixPreCommands }}
- HelixPostCommands: ${{ parameters.HelixPostCommands }}
- WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
- WorkItemCommand: ${{ parameters.WorkItemCommand }}
- WorkItemTimeout: ${{ parameters.WorkItemTimeout }}
- CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
- XUnitProjects: ${{ parameters.XUnitProjects }}
- XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }}
- XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }}
- XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }}
- XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }}
- IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
- DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
- DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
- WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
- HelixBaseUri: ${{ parameters.HelixBaseUri }}
- Creator: ${{ parameters.Creator }}
- SYSTEM_ACCESSTOKEN: $(System.AccessToken)
- condition: and(${{ parameters.condition }}, eq(variables['Agent.Os'], 'Windows_NT'))
- continueOnError: ${{ parameters.continueOnError }}
- - script: $BUILD_SOURCESDIRECTORY/eng/common/msbuild.sh $BUILD_SOURCESDIRECTORY/${{ parameters.HelixProjectPath }} /restore /p:TreatWarningsAsErrors=false ${{ parameters.HelixProjectArguments }} /t:Test /bl:$BUILD_SOURCESDIRECTORY/artifacts/log/$BuildConfig/SendToHelix.binlog
- displayName: ${{ parameters.DisplayNamePrefix }} (Unix)
- env:
- BuildConfig: $(_BuildConfig)
- HelixSource: ${{ parameters.HelixSource }}
- HelixType: ${{ parameters.HelixType }}
- HelixBuild: ${{ parameters.HelixBuild }}
- HelixConfiguration: ${{ parameters.HelixConfiguration }}
- HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
- HelixAccessToken: ${{ parameters.HelixAccessToken }}
- HelixPreCommands: ${{ parameters.HelixPreCommands }}
- HelixPostCommands: ${{ parameters.HelixPostCommands }}
- WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
- WorkItemCommand: ${{ parameters.WorkItemCommand }}
- WorkItemTimeout: ${{ parameters.WorkItemTimeout }}
- CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
- XUnitProjects: ${{ parameters.XUnitProjects }}
- XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }}
- XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }}
- XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }}
- XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }}
- IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
- DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
- DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
- WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
- HelixBaseUri: ${{ parameters.HelixBaseUri }}
- Creator: ${{ parameters.Creator }}
- SYSTEM_ACCESSTOKEN: $(System.AccessToken)
- condition: and(${{ parameters.condition }}, ne(variables['Agent.Os'], 'Windows_NT'))
- continueOnError: ${{ parameters.continueOnError }}
+- template: /eng/common/core-templates/steps/send-to-helix.yml
+ parameters:
+ is1ESPipeline: true
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates-official/steps/source-build.yml b/eng/common/templates-official/steps/source-build.yml
index 53ed57b6d48abc..8f92c49e7b06fc 100644
--- a/eng/common/templates-official/steps/source-build.yml
+++ b/eng/common/templates-official/steps/source-build.yml
@@ -1,131 +1,7 @@
-parameters:
- # This template adds arcade-powered source-build to CI.
-
- # This is a 'steps' template, and is intended for advanced scenarios where the existing build
- # infra has a careful build methodology that must be followed. For example, a repo
- # (dotnet/runtime) might choose to clone the GitHub repo only once and store it as a pipeline
- # artifact for all subsequent jobs to use, to reduce dependence on a strong network connection to
- # GitHub. Using this steps template leaves room for that infra to be included.
-
- # Defines the platform on which to run the steps. See 'eng/common/templates-official/job/source-build.yml'
- # for details. The entire object is described in the 'job' template for simplicity, even though
- # the usage of the properties on this object is split between the 'job' and 'steps' templates.
- platform: {}
-
steps:
-# Build. Keep it self-contained for simple reusability. (No source-build-specific job variables.)
-- script: |
- set -x
- df -h
-
- # If building on the internal project, the artifact feeds variable may be available (usually only if needed)
- # In that case, call the feed setup script to add internal feeds corresponding to public ones.
- # In addition, add an msbuild argument to copy the WIP from the repo to the target build location.
- # This is because SetupNuGetSources.sh will alter the current NuGet.config file, and we need to preserve those
- # changes.
- internalRestoreArgs=
- if [ '$(dn-bot-dnceng-artifact-feeds-rw)' != '$''(dn-bot-dnceng-artifact-feeds-rw)' ]; then
- # Temporarily work around https://github.com/dotnet/arcade/issues/7709
- chmod +x $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh
- $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh $(Build.SourcesDirectory)/NuGet.config $(dn-bot-dnceng-artifact-feeds-rw)
- internalRestoreArgs='/p:CopyWipIntoInnerSourceBuildRepo=true'
-
- # The 'Copy WIP' feature of source build uses git stash to apply changes from the original repo.
- # This only works if there is a username/email configured, which won't be the case in most CI runs.
- git config --get user.email
- if [ $? -ne 0 ]; then
- git config user.email dn-bot@microsoft.com
- git config user.name dn-bot
- fi
- fi
-
- # If building on the internal project, the internal storage variable may be available (usually only if needed)
- # In that case, add variables to allow the download of internal runtimes if the specified versions are not found
- # in the default public locations.
- internalRuntimeDownloadArgs=
- if [ '$(dotnetbuilds-internal-container-read-token-base64)' != '$''(dotnetbuilds-internal-container-read-token-base64)' ]; then
- internalRuntimeDownloadArgs='/p:DotNetRuntimeSourceFeed=https://dotnetbuilds.blob.core.windows.net/internal /p:DotNetRuntimeSourceFeedKey=$(dotnetbuilds-internal-container-read-token-base64) --runtimesourcefeed https://dotnetbuilds.blob.core.windows.net/internal --runtimesourcefeedkey $(dotnetbuilds-internal-container-read-token-base64)'
- fi
-
- buildConfig=Release
- # Check if AzDO substitutes in a build config from a variable, and use it if so.
- if [ '$(_BuildConfig)' != '$''(_BuildConfig)' ]; then
- buildConfig='$(_BuildConfig)'
- fi
-
- officialBuildArgs=
- if [ '${{ and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}' = 'True' ]; then
- officialBuildArgs='/p:DotNetPublishUsingPipelines=true /p:OfficialBuildId=$(BUILD.BUILDNUMBER)'
- fi
-
- targetRidArgs=
- if [ '${{ parameters.platform.targetRID }}' != '' ]; then
- targetRidArgs='/p:TargetRid=${{ parameters.platform.targetRID }}'
- fi
-
- runtimeOsArgs=
- if [ '${{ parameters.platform.runtimeOS }}' != '' ]; then
- runtimeOsArgs='/p:RuntimeOS=${{ parameters.platform.runtimeOS }}'
- fi
-
- baseOsArgs=
- if [ '${{ parameters.platform.baseOS }}' != '' ]; then
- baseOsArgs='/p:BaseOS=${{ parameters.platform.baseOS }}'
- fi
-
- publishArgs=
- if [ '${{ parameters.platform.skipPublishValidation }}' != 'true' ]; then
- publishArgs='--publish'
- fi
-
- assetManifestFileName=SourceBuild_RidSpecific.xml
- if [ '${{ parameters.platform.name }}' != '' ]; then
- assetManifestFileName=SourceBuild_${{ parameters.platform.name }}.xml
- fi
-
- ${{ coalesce(parameters.platform.buildScript, './build.sh') }} --ci \
- --configuration $buildConfig \
- --restore --build --pack $publishArgs -bl \
- $officialBuildArgs \
- $internalRuntimeDownloadArgs \
- $internalRestoreArgs \
- $targetRidArgs \
- $runtimeOsArgs \
- $baseOsArgs \
- /p:SourceBuildNonPortable=${{ parameters.platform.nonPortable }} \
- /p:ArcadeBuildFromSource=true \
- /p:DotNetBuildSourceOnly=true \
- /p:DotNetBuildRepo=true \
- /p:AssetManifestFileName=$assetManifestFileName
- displayName: Build
-
-# Upload build logs for diagnosis.
-- task: CopyFiles@2
- displayName: Prepare BuildLogs staging directory
- inputs:
- SourceFolder: '$(Build.SourcesDirectory)'
- Contents: |
- **/*.log
- **/*.binlog
- artifacts/sb/prebuilt-report/**
- TargetFolder: '$(Build.StagingDirectory)/BuildLogs'
- CleanTargetFolder: true
- continueOnError: true
- condition: succeededOrFailed()
-
-- task: 1ES.PublishPipelineArtifact@1
- displayName: Publish BuildLogs
- inputs:
- targetPath: '$(Build.StagingDirectory)/BuildLogs'
- artifactName: BuildLogs_SourceBuild_${{ parameters.platform.name }}_Attempt$(System.JobAttempt)
- continueOnError: true
- condition: succeededOrFailed()
+- template: /eng/common/core-templates/steps/source-build.yml
+ parameters:
+ is1ESPipeline: true
-# Manually inject component detection so that we can ignore the source build upstream cache, which contains
-# a nupkg cache of input packages (a local feed).
-# This path must match the upstream cache path in property 'CurrentRepoSourceBuiltNupkgCacheDir'
-# in src\Microsoft.DotNet.Arcade.Sdk\tools\SourceBuild\SourceBuildArcade.targets
-- task: ComponentGovernanceComponentDetection@0
- displayName: Component Detection (Exclude upstream cache)
- inputs:
- ignoreDirectories: '$(Build.SourcesDirectory)/artifacts/sb/src/artifacts/obj/source-built-upstream-cache'
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates/job/execute-sdl.yml b/eng/common/templates/job/execute-sdl.yml
deleted file mode 100644
index 7870f93bc17652..00000000000000
--- a/eng/common/templates/job/execute-sdl.yml
+++ /dev/null
@@ -1,139 +0,0 @@
-parameters:
- enable: 'false' # Whether the SDL validation job should execute or not
- overrideParameters: '' # Optional: to override values for parameters.
- additionalParameters: '' # Optional: parameters that need user specific values eg: '-SourceToolsList @("abc","def") -ArtifactToolsList @("ghi","jkl")'
- # Optional: if specified, restore and use this version of Guardian instead of the default.
- overrideGuardianVersion: ''
- # Optional: if true, publish the '.gdn' folder as a pipeline artifact. This can help with in-depth
- # diagnosis of problems with specific tool configurations.
- publishGuardianDirectoryToPipeline: false
- # The script to run to execute all SDL tools. Use this if you want to use a script to define SDL
- # parameters rather than relying on YAML. It may be better to use a local script, because you can
- # reproduce results locally without piecing together a command based on the YAML.
- executeAllSdlToolsScript: 'eng/common/sdl/execute-all-sdl-tools.ps1'
- # There is some sort of bug (has been reported) in Azure DevOps where if this parameter is named
- # 'continueOnError', the parameter value is not correctly picked up.
- # This can also be remedied by the caller (post-build.yml) if it does not use a nested parameter
- sdlContinueOnError: false # optional: determines whether to continue the build if the step errors;
- # optional: determines if build artifacts should be downloaded.
- downloadArtifacts: true
- # optional: determines if this job should search the directory of downloaded artifacts for
- # 'tar.gz' and 'zip' archive files and extract them before running SDL validation tasks.
- extractArchiveArtifacts: false
- dependsOn: '' # Optional: dependencies of the job
- artifactNames: '' # Optional: patterns supplied to DownloadBuildArtifacts
- # Usage:
- # artifactNames:
- # - 'BlobArtifacts'
- # - 'Artifacts_Windows_NT_Release'
- # Optional: download a list of pipeline artifacts. 'downloadArtifacts' controls build artifacts,
- # not pipeline artifacts, so doesn't affect the use of this parameter.
- pipelineArtifactNames: []
-
-jobs:
-- job: Run_SDL
- dependsOn: ${{ parameters.dependsOn }}
- displayName: Run SDL tool
- condition: and(succeededOrFailed(), eq( ${{ parameters.enable }}, 'true'))
- variables:
- - group: DotNet-VSTS-Bot
- - name: AzDOProjectName
- value: ${{ parameters.AzDOProjectName }}
- - name: AzDOPipelineId
- value: ${{ parameters.AzDOPipelineId }}
- - name: AzDOBuildId
- value: ${{ parameters.AzDOBuildId }}
- - template: /eng/common/templates/variables/sdl-variables.yml
- - name: GuardianVersion
- value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }}
- - template: /eng/common/templates/variables/pool-providers.yml
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: VSEngSS-MicroBuild2022-1ES
- demands: Cmd
- # If it's not devdiv, it's dnceng
- ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
- name: $(DncEngInternalBuildPool)
- demands: ImageOverride -equals windows.vs2019.amd64
- steps:
- - checkout: self
- clean: true
-
- # If the template caller didn't provide an AzDO parameter, set them all up as Maestro vars.
- - ${{ if not(and(parameters.AzDOProjectName, parameters.AzDOPipelineId, parameters.AzDOBuildId)) }}:
- - template: /eng/common/templates/post-build/setup-maestro-vars.yml
-
- - ${{ if ne(parameters.downloadArtifacts, 'false')}}:
- - ${{ if ne(parameters.artifactNames, '') }}:
- - ${{ each artifactName in parameters.artifactNames }}:
- - task: DownloadBuildArtifacts@0
- displayName: Download Build Artifacts
- inputs:
- buildType: specific
- buildVersionToDownload: specific
- project: $(AzDOProjectName)
- pipeline: $(AzDOPipelineId)
- buildId: $(AzDOBuildId)
- artifactName: ${{ artifactName }}
- downloadPath: $(Build.ArtifactStagingDirectory)\artifacts
- checkDownloadedFiles: true
- - ${{ if eq(parameters.artifactNames, '') }}:
- - task: DownloadBuildArtifacts@0
- displayName: Download Build Artifacts
- inputs:
- buildType: specific
- buildVersionToDownload: specific
- project: $(AzDOProjectName)
- pipeline: $(AzDOPipelineId)
- buildId: $(AzDOBuildId)
- downloadType: specific files
- itemPattern: "**"
- downloadPath: $(Build.ArtifactStagingDirectory)\artifacts
- checkDownloadedFiles: true
-
- - ${{ each artifactName in parameters.pipelineArtifactNames }}:
- - task: DownloadPipelineArtifact@2
- displayName: Download Pipeline Artifacts
- inputs:
- buildType: specific
- buildVersionToDownload: specific
- project: $(AzDOProjectName)
- pipeline: $(AzDOPipelineId)
- buildId: $(AzDOBuildId)
- artifactName: ${{ artifactName }}
- downloadPath: $(Build.ArtifactStagingDirectory)\artifacts
- checkDownloadedFiles: true
-
- - powershell: eng/common/sdl/trim-assets-version.ps1
- -InputPath $(Build.ArtifactStagingDirectory)\artifacts
- displayName: Trim the version from the NuGet packages
- continueOnError: ${{ parameters.sdlContinueOnError }}
-
- - powershell: eng/common/sdl/extract-artifact-packages.ps1
- -InputPath $(Build.ArtifactStagingDirectory)\artifacts\BlobArtifacts
- -ExtractPath $(Build.ArtifactStagingDirectory)\artifacts\BlobArtifacts
- displayName: Extract Blob Artifacts
- continueOnError: ${{ parameters.sdlContinueOnError }}
-
- - powershell: eng/common/sdl/extract-artifact-packages.ps1
- -InputPath $(Build.ArtifactStagingDirectory)\artifacts\PackageArtifacts
- -ExtractPath $(Build.ArtifactStagingDirectory)\artifacts\PackageArtifacts
- displayName: Extract Package Artifacts
- continueOnError: ${{ parameters.sdlContinueOnError }}
-
- - ${{ if ne(parameters.extractArchiveArtifacts, 'false') }}:
- - powershell: eng/common/sdl/extract-artifact-archives.ps1
- -InputPath $(Build.ArtifactStagingDirectory)\artifacts
- -ExtractPath $(Build.ArtifactStagingDirectory)\artifacts
- displayName: Extract Archive Artifacts
- continueOnError: ${{ parameters.sdlContinueOnError }}
-
- - template: /eng/common/templates/steps/execute-sdl.yml
- parameters:
- overrideGuardianVersion: ${{ parameters.overrideGuardianVersion }}
- executeAllSdlToolsScript: ${{ parameters.executeAllSdlToolsScript }}
- overrideParameters: ${{ parameters.overrideParameters }}
- additionalParameters: ${{ parameters.additionalParameters }}
- publishGuardianDirectoryToPipeline: ${{ parameters.publishGuardianDirectoryToPipeline }}
- sdlContinueOnError: ${{ parameters.sdlContinueOnError }}
diff --git a/eng/common/templates/job/job.yml b/eng/common/templates/job/job.yml
index a3277bf15c51ff..1cf9a6d48127b6 100644
--- a/eng/common/templates/job/job.yml
+++ b/eng/common/templates/job/job.yml
@@ -1,259 +1,61 @@
-# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
-# and some (Microbuild) should only be applied to non-PR cases for internal builds.
-
-parameters:
-# Job schema parameters - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
- cancelTimeoutInMinutes: ''
- condition: ''
- container: ''
- continueOnError: false
- dependsOn: ''
- displayName: ''
- pool: ''
- steps: []
- strategy: ''
- timeoutInMinutes: ''
- variables: []
- workspace: ''
- templateContext: ''
-
-# Job base template specific parameters
- # See schema documentation - https://github.com/dotnet/arcade/blob/master/Documentation/AzureDevOps/TemplateSchema.md
- artifacts: ''
- enableMicrobuild: false
+parameters:
enablePublishBuildArtifacts: false
- enablePublishBuildAssets: false
- enablePublishTestResults: false
- enablePublishUsingPipelines: false
- enableBuildRetry: false
- disableComponentGovernance: ''
- componentGovernanceIgnoreDirectories: ''
- mergeTestResults: false
- testRunTitle: ''
- testResultsFormat: ''
- name: ''
- preSteps: []
- runAsPublic: false
-# Sbom related params
- enableSbom: true
- PackageVersion: 7.0.0
- BuildDropPath: '$(Build.SourcesDirectory)/artifacts'
jobs:
-- job: ${{ parameters.name }}
-
- ${{ if ne(parameters.cancelTimeoutInMinutes, '') }}:
- cancelTimeoutInMinutes: ${{ parameters.cancelTimeoutInMinutes }}
-
- ${{ if ne(parameters.condition, '') }}:
- condition: ${{ parameters.condition }}
-
- ${{ if ne(parameters.container, '') }}:
- container: ${{ parameters.container }}
-
- ${{ if ne(parameters.continueOnError, '') }}:
- continueOnError: ${{ parameters.continueOnError }}
-
- ${{ if ne(parameters.dependsOn, '') }}:
- dependsOn: ${{ parameters.dependsOn }}
-
- ${{ if ne(parameters.displayName, '') }}:
- displayName: ${{ parameters.displayName }}
-
- ${{ if ne(parameters.pool, '') }}:
- pool: ${{ parameters.pool }}
-
- ${{ if ne(parameters.strategy, '') }}:
- strategy: ${{ parameters.strategy }}
-
- ${{ if ne(parameters.timeoutInMinutes, '') }}:
- timeoutInMinutes: ${{ parameters.timeoutInMinutes }}
-
- ${{ if ne(parameters.templateContext, '') }}:
- templateContext: ${{ parameters.templateContext }}
-
- variables:
- - ${{ if ne(parameters.enableTelemetry, 'false') }}:
- - name: DOTNET_CLI_TELEMETRY_PROFILE
- value: '$(Build.Repository.Uri)'
- - ${{ if eq(parameters.enableRichCodeNavigation, 'true') }}:
- - name: EnableRichCodeNavigation
- value: 'true'
- # Retry signature validation up to three times, waiting 2 seconds between attempts.
- # See https://learn.microsoft.com/en-us/nuget/reference/errors-and-warnings/nu3028#retry-untrusted-root-failures
- - name: NUGET_EXPERIMENTAL_CHAIN_BUILD_RETRY_POLICY
- value: 3,2000
- - ${{ each variable in parameters.variables }}:
- # handle name-value variable syntax
- # example:
- # - name: [key]
- # value: [value]
- - ${{ if ne(variable.name, '') }}:
- - name: ${{ variable.name }}
- value: ${{ variable.value }}
-
- # handle variable groups
- - ${{ if ne(variable.group, '') }}:
- - group: ${{ variable.group }}
-
- # handle template variable syntax
- # example:
- # - template: path/to/template.yml
- # parameters:
- # [key]: [value]
- - ${{ if ne(variable.template, '') }}:
- - template: ${{ variable.template }}
- ${{ if ne(variable.parameters, '') }}:
- parameters: ${{ variable.parameters }}
-
- # handle key-value variable syntax.
- # example:
- # - [key]: [value]
- - ${{ if and(eq(variable.name, ''), eq(variable.group, ''), eq(variable.template, '')) }}:
- - ${{ each pair in variable }}:
- - name: ${{ pair.key }}
- value: ${{ pair.value }}
-
- # DotNet-HelixApi-Access provides 'HelixApiAccessToken' for internal builds
- - ${{ if and(eq(parameters.enableTelemetry, 'true'), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - group: DotNet-HelixApi-Access
-
- ${{ if ne(parameters.workspace, '') }}:
- workspace: ${{ parameters.workspace }}
-
- steps:
- - ${{ if ne(parameters.preSteps, '') }}:
- - ${{ each preStep in parameters.preSteps }}:
- - ${{ preStep }}
-
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
- - task: MicroBuildSigningPlugin@3
- displayName: Install MicroBuild plugin
- inputs:
- signType: $(_SignType)
- zipSources: false
- feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json
- env:
- TeamName: $(_TeamName)
- continueOnError: ${{ parameters.continueOnError }}
- condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
-
- - ${{ if and(eq(parameters.runAsPublic, 'false'), eq(variables['System.TeamProject'], 'internal')) }}:
- - task: NuGetAuthenticate@1
-
- - ${{ if and(ne(parameters.artifacts.download, 'false'), ne(parameters.artifacts.download, '')) }}:
- - task: DownloadPipelineArtifact@2
- inputs:
- buildType: current
- artifactName: ${{ coalesce(parameters.artifacts.download.name, 'Artifacts_$(Agent.OS)_$(_BuildConfig)') }}
- targetPath: ${{ coalesce(parameters.artifacts.download.path, 'artifacts') }}
- itemPattern: ${{ coalesce(parameters.artifacts.download.pattern, '**') }}
-
- - ${{ each step in parameters.steps }}:
- - ${{ step }}
-
- - ${{ if eq(parameters.enableRichCodeNavigation, true) }}:
- - task: RichCodeNavIndexer@0
- displayName: RichCodeNav Upload
- inputs:
- languages: ${{ coalesce(parameters.richCodeNavigationLanguage, 'csharp') }}
- environment: ${{ coalesce(parameters.richCodeNavigationEnvironment, 'internal') }}
- richNavLogOutputDirectory: $(Build.SourcesDirectory)/artifacts/bin
- uploadRichNavArtifacts: ${{ coalesce(parameters.richCodeNavigationUploadArtifacts, false) }}
- continueOnError: true
-
- - template: /eng/common/templates/steps/component-governance.yml
- parameters:
- ${{ if eq(parameters.disableComponentGovernance, '') }}:
- ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.runAsPublic, 'false'), or(startsWith(variables['Build.SourceBranch'], 'refs/heads/release/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/dotnet/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/microsoft/'), eq(variables['Build.SourceBranch'], 'refs/heads/main'))) }}:
- disableComponentGovernance: false
- ${{ else }}:
- disableComponentGovernance: true
- ${{ else }}:
- disableComponentGovernance: ${{ parameters.disableComponentGovernance }}
- componentGovernanceIgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
-
- - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - task: MicroBuildCleanup@1
- displayName: Execute Microbuild cleanup tasks
- condition: and(always(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
- continueOnError: ${{ parameters.continueOnError }}
- env:
- TeamName: $(_TeamName)
-
- - ${{ if ne(parameters.artifacts.publish, '') }}:
- - ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}:
- - task: CopyFiles@2
- displayName: Gather binaries for publish to artifacts
- inputs:
- SourceFolder: 'artifacts/bin'
- Contents: '**'
- TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/bin'
- - task: CopyFiles@2
- displayName: Gather packages for publish to artifacts
- inputs:
- SourceFolder: 'artifacts/packages'
- Contents: '**'
- TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/packages'
- - task: PublishBuildArtifacts@1
- displayName: Publish pipeline artifacts
- inputs:
- PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts'
- PublishLocation: Container
- ArtifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }}
- continueOnError: true
- condition: always()
- - ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}:
- - publish: artifacts/log
- artifact: ${{ coalesce(parameters.artifacts.publish.logs.name, 'Logs_Build_$(Agent.Os)_$(_BuildConfig)') }}
- displayName: Publish logs
- continueOnError: true
- condition: always()
-
- - ${{ if ne(parameters.enablePublishBuildArtifacts, 'false') }}:
- - task: PublishBuildArtifacts@1
- displayName: Publish Logs
- inputs:
- PathtoPublish: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)'
- PublishLocation: Container
- ArtifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)' ) }}
- continueOnError: true
- condition: always()
-
- - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'xunit')) }}:
- - task: PublishTestResults@2
- displayName: Publish XUnit Test Results
- inputs:
- testResultsFormat: 'xUnit'
- testResultsFiles: '*.xml'
- searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)'
- testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-xunit
- mergeTestResults: ${{ parameters.mergeTestResults }}
- continueOnError: true
- condition: always()
- - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'vstest')) }}:
- - task: PublishTestResults@2
- displayName: Publish TRX Test Results
- inputs:
- testResultsFormat: 'VSTest'
- testResultsFiles: '*.trx'
- searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)'
- testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-trx
- mergeTestResults: ${{ parameters.mergeTestResults }}
- continueOnError: true
- condition: always()
-
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.enableSbom, 'true')) }}:
- - template: /eng/common/templates/steps/generate-sbom.yml
- parameters:
- PackageVersion: ${{ parameters.packageVersion}}
- BuildDropPath: ${{ parameters.buildDropPath }}
- IgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
-
- - ${{ if eq(parameters.enableBuildRetry, 'true') }}:
- - publish: $(Build.SourcesDirectory)\eng\common\BuildConfiguration
- artifact: BuildConfiguration
- displayName: Publish build retry configuration
- continueOnError: true
+- template: /eng/common/core-templates/job/job.yml
+ parameters:
+ is1ESPipeline: false
+
+ ${{ each parameter in parameters }}:
+ ${{ if and(ne(parameter.key, 'steps'), ne(parameter.key, 'is1ESPipeline')) }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
+
+ steps:
+ - ${{ each step in parameters.steps }}:
+ - ${{ step }}
+
+ artifactPublishSteps:
+ - ${{ if ne(parameters.artifacts.publish, '') }}:
+ - ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}:
+ - template: /eng/common/core-templates/steps/publish-build-artifacts.yml
+ parameters:
+ is1ESPipeline: false
+ args:
+ displayName: Publish pipeline artifacts
+ pathToPublish: '$(Build.ArtifactStagingDirectory)/artifacts'
+ publishLocation: Container
+ artifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }}
+ continueOnError: true
+ condition: always()
+ - ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}:
+ - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
+ parameters:
+ is1ESPipeline: false
+ args:
+ targetPath: '$(Build.ArtifactStagingDirectory)/artifacts/log'
+ artifactName: ${{ coalesce(parameters.artifacts.publish.logs.name, 'Logs_Build_$(Agent.Os)_$(_BuildConfig)') }}
+ displayName: 'Publish logs'
+ continueOnError: true
+ condition: always()
+
+ - ${{ if ne(parameters.enablePublishBuildArtifacts, 'false') }}:
+ - template: /eng/common/core-templates/steps/publish-build-artifacts.yml
+ parameters:
+ is1ESPipeline: false
+ args:
+ displayName: Publish Logs
+ pathToPublish: '$(Build.ArtifactStagingDirectory)/artifacts/log/$(_BuildConfig)'
+ publishLocation: Container
+ artifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)' ) }}
+ continueOnError: true
+ condition: always()
+
+ - ${{ if eq(parameters.enableBuildRetry, 'true') }}:
+ - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
+ parameters:
+ is1ESPipeline: false
+ args:
+ targetPath: '$(Build.SourcesDirectory)\eng\common\BuildConfiguration'
+ artifactName: 'BuildConfiguration'
+ displayName: 'Publish build retry configuration'
+ continueOnError: true
diff --git a/eng/common/templates/job/onelocbuild.yml b/eng/common/templates/job/onelocbuild.yml
index 60ab00c4de3acd..ff829dc4c700c6 100644
--- a/eng/common/templates/job/onelocbuild.yml
+++ b/eng/common/templates/job/onelocbuild.yml
@@ -1,109 +1,7 @@
-parameters:
- # Optional: dependencies of the job
- dependsOn: ''
-
- # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
- pool: ''
-
- CeapexPat: $(dn-bot-ceapex-package-r) # PAT for the loc AzDO instance https://dev.azure.com/ceapex
- GithubPat: $(BotAccount-dotnet-bot-repo-PAT)
-
- SourcesDirectory: $(Build.SourcesDirectory)
- CreatePr: true
- AutoCompletePr: false
- ReusePr: true
- UseLfLineEndings: true
- UseCheckedInLocProjectJson: false
- SkipLocProjectJsonGeneration: false
- LanguageSet: VS_Main_Languages
- LclSource: lclFilesInRepo
- LclPackageId: ''
- RepoType: gitHub
- GitHubOrg: dotnet
- MirrorRepo: ''
- MirrorBranch: main
- condition: ''
- JobNameSuffix: ''
-
jobs:
-- job: OneLocBuild${{ parameters.JobNameSuffix }}
-
- dependsOn: ${{ parameters.dependsOn }}
-
- displayName: OneLocBuild${{ parameters.JobNameSuffix }}
-
- variables:
- - group: OneLocBuildVariables # Contains the CeapexPat and GithubPat
- - name: _GenerateLocProjectArguments
- value: -SourcesDirectory ${{ parameters.SourcesDirectory }}
- -LanguageSet "${{ parameters.LanguageSet }}"
- -CreateNeutralXlfs
- - ${{ if eq(parameters.UseCheckedInLocProjectJson, 'true') }}:
- - name: _GenerateLocProjectArguments
- value: ${{ variables._GenerateLocProjectArguments }} -UseCheckedInLocProjectJson
- - template: /eng/common/templates/variables/pool-providers.yml
-
- ${{ if ne(parameters.pool, '') }}:
- pool: ${{ parameters.pool }}
- ${{ if eq(parameters.pool, '') }}:
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: VSEngSS-MicroBuild2022-1ES
- demands: Cmd
- # If it's not devdiv, it's dnceng
- ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
- name: $(DncEngInternalBuildPool)
- demands: ImageOverride -equals windows.vs2019.amd64
-
- steps:
- - ${{ if ne(parameters.SkipLocProjectJsonGeneration, 'true') }}:
- - task: Powershell@2
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/generate-locproject.ps1
- arguments: $(_GenerateLocProjectArguments)
- displayName: Generate LocProject.json
- condition: ${{ parameters.condition }}
-
- - task: OneLocBuild@2
- displayName: OneLocBuild
- env:
- SYSTEM_ACCESSTOKEN: $(System.AccessToken)
- inputs:
- locProj: eng/Localize/LocProject.json
- outDir: $(Build.ArtifactStagingDirectory)
- lclSource: ${{ parameters.LclSource }}
- lclPackageId: ${{ parameters.LclPackageId }}
- isCreatePrSelected: ${{ parameters.CreatePr }}
- isAutoCompletePrSelected: ${{ parameters.AutoCompletePr }}
- ${{ if eq(parameters.CreatePr, true) }}:
- isUseLfLineEndingsSelected: ${{ parameters.UseLfLineEndings }}
- ${{ if eq(parameters.RepoType, 'gitHub') }}:
- isShouldReusePrSelected: ${{ parameters.ReusePr }}
- packageSourceAuth: patAuth
- patVariable: ${{ parameters.CeapexPat }}
- ${{ if eq(parameters.RepoType, 'gitHub') }}:
- repoType: ${{ parameters.RepoType }}
- gitHubPatVariable: "${{ parameters.GithubPat }}"
- ${{ if ne(parameters.MirrorRepo, '') }}:
- isMirrorRepoSelected: true
- gitHubOrganization: ${{ parameters.GitHubOrg }}
- mirrorRepo: ${{ parameters.MirrorRepo }}
- mirrorBranch: ${{ parameters.MirrorBranch }}
- condition: ${{ parameters.condition }}
-
- - task: PublishBuildArtifacts@1
- displayName: Publish Localization Files
- inputs:
- PathtoPublish: '$(Build.ArtifactStagingDirectory)/loc'
- PublishLocation: Container
- ArtifactName: Loc
- condition: ${{ parameters.condition }}
+- template: /eng/common/core-templates/job/onelocbuild.yml
+ parameters:
+ is1ESPipeline: false
- - task: PublishBuildArtifacts@1
- displayName: Publish LocProject.json
- inputs:
- PathtoPublish: '$(Build.SourcesDirectory)/eng/Localize/'
- PublishLocation: Container
- ArtifactName: Loc
- condition: ${{ parameters.condition }}
\ No newline at end of file
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates/job/publish-build-assets.yml b/eng/common/templates/job/publish-build-assets.yml
index bb42240f865b56..ab2edec2adb541 100644
--- a/eng/common/templates/job/publish-build-assets.yml
+++ b/eng/common/templates/job/publish-build-assets.yml
@@ -1,155 +1,7 @@
-parameters:
- configuration: 'Debug'
-
- # Optional: condition for the job to run
- condition: ''
-
- # Optional: 'true' if future jobs should run even if this job fails
- continueOnError: false
-
- # Optional: dependencies of the job
- dependsOn: ''
-
- # Optional: Include PublishBuildArtifacts task
- enablePublishBuildArtifacts: false
-
- # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
- pool: {}
-
- # Optional: should run as a public build even in the internal project
- # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
- runAsPublic: false
-
- # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing
- publishUsingPipelines: false
-
- # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing
- publishAssetsImmediately: false
-
- artifactsPublishingAdditionalParameters: ''
-
- signingValidationAdditionalParameters: ''
-
jobs:
-- job: Asset_Registry_Publish
-
- dependsOn: ${{ parameters.dependsOn }}
- timeoutInMinutes: 150
-
- ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
- displayName: Publish Assets
- ${{ else }}:
- displayName: Publish to Build Asset Registry
-
- variables:
- - template: /eng/common/templates/variables/pool-providers.yml
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - group: Publish-Build-Assets
- - group: AzureDevOps-Artifact-Feeds-Pats
- - name: runCodesignValidationInjection
- value: false
- # unconditional - needed for logs publishing (redactor tool version)
- - template: /eng/common/templates/post-build/common-variables.yml
-
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: VSEngSS-MicroBuild2022-1ES
- demands: Cmd
- # If it's not devdiv, it's dnceng
- ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
- name: NetCore1ESPool-Publishing-Internal
- demands: ImageOverride -equals windows.vs2019.amd64
-
- steps:
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - checkout: self
- fetchDepth: 3
- clean: true
-
- - task: DownloadBuildArtifacts@0
- displayName: Download artifact
- inputs:
- artifactName: AssetManifests
- downloadPath: '$(Build.StagingDirectory)/Download'
- checkDownloadedFiles: true
- condition: ${{ parameters.condition }}
- continueOnError: ${{ parameters.continueOnError }}
-
- - task: NuGetAuthenticate@1
-
- - task: PowerShell@2
- displayName: Publish Build Assets
- inputs:
- filePath: eng\common\sdk-task.ps1
- arguments: -task PublishBuildAssets -restore -msbuildEngine dotnet
- /p:ManifestsPath='$(Build.StagingDirectory)/Download/AssetManifests'
- /p:BuildAssetRegistryToken=$(MaestroAccessToken)
- /p:MaestroApiEndpoint=https://maestro.dot.net
- /p:PublishUsingPipelines=${{ parameters.publishUsingPipelines }}
- /p:OfficialBuildId=$(Build.BuildNumber)
- condition: ${{ parameters.condition }}
- continueOnError: ${{ parameters.continueOnError }}
-
- - task: powershell@2
- displayName: Create ReleaseConfigs Artifact
- inputs:
- targetType: inline
- script: |
- Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value $(BARBuildId)
- Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value "$(DefaultChannels)"
- Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value $(IsStableBuild)
-
- - task: PublishBuildArtifacts@1
- displayName: Publish ReleaseConfigs Artifact
- inputs:
- PathtoPublish: '$(Build.StagingDirectory)/ReleaseConfigs.txt'
- PublishLocation: Container
- ArtifactName: ReleaseConfigs
-
- - task: powershell@2
- displayName: Check if SymbolPublishingExclusionsFile.txt exists
- inputs:
- targetType: inline
- script: |
- $symbolExclusionfile = "$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt"
- if(Test-Path -Path $symbolExclusionfile)
- {
- Write-Host "SymbolExclusionFile exists"
- Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]true"
- }
- else{
- Write-Host "Symbols Exclusion file does not exists"
- Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]false"
- }
-
- - task: PublishBuildArtifacts@1
- displayName: Publish SymbolPublishingExclusionsFile Artifact
- condition: eq(variables['SymbolExclusionFile'], 'true')
- inputs:
- PathtoPublish: '$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt'
- PublishLocation: Container
- ArtifactName: ReleaseConfigs
-
- - ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
- - template: /eng/common/templates/post-build/setup-maestro-vars.yml
- parameters:
- BARBuildId: ${{ parameters.BARBuildId }}
- PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
-
- - task: PowerShell@2
- displayName: Publish Using Darc
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1
- arguments: -BuildId $(BARBuildId)
- -PublishingInfraVersion 3
- -AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)'
- -MaestroToken '$(MaestroApiAccessToken)'
- -WaitPublishingFinish true
- -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}'
- -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}'
+- template: /eng/common/core-templates/job/publish-build-assets.yml
+ parameters:
+ is1ESPipeline: false
- - ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}:
- - template: /eng/common/templates/steps/publish-logs.yml
- parameters:
- JobLabel: 'Publish_Artifacts_Logs'
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates/job/source-build.yml b/eng/common/templates/job/source-build.yml
index d7ed209494c7be..e44d47b1d760c4 100644
--- a/eng/common/templates/job/source-build.yml
+++ b/eng/common/templates/job/source-build.yml
@@ -1,66 +1,7 @@
-parameters:
- # This template adds arcade-powered source-build to CI. The template produces a server job with a
- # default ID 'Source_Build_Complete' to put in a dependency list if necessary.
-
- # Specifies the prefix for source-build jobs added to pipeline. Use this if disambiguation needed.
- jobNamePrefix: 'Source_Build'
-
- # Defines the platform on which to run the job. By default, a linux-x64 machine, suitable for
- # managed-only repositories. This is an object with these properties:
- #
- # name: ''
- # The name of the job. This is included in the job ID.
- # targetRID: ''
- # The name of the target RID to use, instead of the one auto-detected by Arcade.
- # nonPortable: false
- # Enables non-portable mode. This means a more specific RID (e.g. fedora.32-x64 rather than
- # linux-x64), and compiling against distro-provided packages rather than portable ones.
- # skipPublishValidation: false
- # Disables publishing validation. By default, a check is performed to ensure no packages are
- # published by source-build.
- # container: ''
- # A container to use. Runs in docker.
- # pool: {}
- # A pool to use. Runs directly on an agent.
- # buildScript: ''
- # Specifies the build script to invoke to perform the build in the repo. The default
- # './build.sh' should work for typical Arcade repositories, but this is customizable for
- # difficult situations.
- # jobProperties: {}
- # A list of job properties to inject at the top level, for potential extensibility beyond
- # container and pool.
- platform: {}
-
jobs:
-- job: ${{ parameters.jobNamePrefix }}_${{ parameters.platform.name }}
- displayName: Source-Build (${{ parameters.platform.name }})
-
- ${{ each property in parameters.platform.jobProperties }}:
- ${{ property.key }}: ${{ property.value }}
-
- ${{ if ne(parameters.platform.container, '') }}:
- container: ${{ parameters.platform.container }}
-
- ${{ if eq(parameters.platform.pool, '') }}:
- # The default VM host AzDO pool. This should be capable of running Docker containers: almost all
- # source-build builds run in Docker, including the default managed platform.
- # /eng/common/templates/variables/pool-providers.yml can't be used here (some customers declare variables already), so duplicate its logic
- pool:
- ${{ if eq(variables['System.TeamProject'], 'public') }}:
- name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore-Svc-Public' ), False, 'NetCore-Public')]
- demands: ImageOverride -equals Build.Ubuntu.2204.Amd64.Open
-
- ${{ if eq(variables['System.TeamProject'], 'internal') }}:
- name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore1ESPool-Svc-Internal'), False, 'NetCore1ESPool-Internal')]
- demands: ImageOverride -equals Build.Ubuntu.2204.Amd64
-
- ${{ if ne(parameters.platform.pool, '') }}:
- pool: ${{ parameters.platform.pool }}
-
- workspace:
- clean: all
+- template: /eng/common/core-templates/job/source-build.yml
+ parameters:
+ is1ESPipeline: false
- steps:
- - template: /eng/common/templates/steps/source-build.yml
- parameters:
- platform: ${{ parameters.platform }}
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates/job/source-index-stage1.yml b/eng/common/templates/job/source-index-stage1.yml
index b5a3e5c4a6c847..89f3291593cb78 100644
--- a/eng/common/templates/job/source-index-stage1.yml
+++ b/eng/common/templates/job/source-index-stage1.yml
@@ -1,67 +1,7 @@
-parameters:
- runAsPublic: false
- sourceIndexPackageVersion: 1.0.1-20240129.2
- sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json
- sourceIndexBuildCommand: powershell -NoLogo -NoProfile -ExecutionPolicy Bypass -Command "eng/common/build.ps1 -restore -build -binarylog -ci"
- preSteps: []
- binlogPath: artifacts/log/Debug/Build.binlog
- condition: ''
- dependsOn: ''
- pool: ''
-
jobs:
-- job: SourceIndexStage1
- dependsOn: ${{ parameters.dependsOn }}
- condition: ${{ parameters.condition }}
- variables:
- - name: SourceIndexPackageVersion
- value: ${{ parameters.sourceIndexPackageVersion }}
- - name: SourceIndexPackageSource
- value: ${{ parameters.sourceIndexPackageSource }}
- - name: BinlogPath
- value: ${{ parameters.binlogPath }}
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - group: source-dot-net stage1 variables
- - template: /eng/common/templates/variables/pool-providers.yml
-
- ${{ if ne(parameters.pool, '') }}:
- pool: ${{ parameters.pool }}
- ${{ if eq(parameters.pool, '') }}:
- pool:
- ${{ if eq(variables['System.TeamProject'], 'public') }}:
- name: $(DncEngPublicBuildPool)
- demands: ImageOverride -equals windows.vs2022.amd64.open
- ${{ if eq(variables['System.TeamProject'], 'internal') }}:
- name: $(DncEngInternalBuildPool)
- demands: ImageOverride -equals windows.vs2022.amd64
-
- steps:
- - ${{ each preStep in parameters.preSteps }}:
- - ${{ preStep }}
-
- - task: UseDotNet@2
- displayName: Use .NET 8 SDK
- inputs:
- packageType: sdk
- version: 8.0.x
- installationPath: $(Agent.TempDirectory)/dotnet
- workingDirectory: $(Agent.TempDirectory)
-
- - script: |
- $(Agent.TempDirectory)/dotnet/dotnet tool install BinLogToSln --version $(SourceIndexPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools
- $(Agent.TempDirectory)/dotnet/dotnet tool install UploadIndexStage1 --version $(SourceIndexPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools
- displayName: Download Tools
- # Set working directory to temp directory so 'dotnet' doesn't try to use global.json and use the repo's sdk.
- workingDirectory: $(Agent.TempDirectory)
-
- - script: ${{ parameters.sourceIndexBuildCommand }}
- displayName: Build Repository
-
- - script: $(Agent.TempDirectory)/.source-index/tools/BinLogToSln -i $(BinlogPath) -r $(Build.SourcesDirectory) -n $(Build.Repository.Name) -o .source-index/stage1output
- displayName: Process Binlog into indexable sln
+- template: /eng/common/core-templates/job/source-index-stage1.yml
+ parameters:
+ is1ESPipeline: false
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - script: $(Agent.TempDirectory)/.source-index/tools/UploadIndexStage1 -i .source-index/stage1output -n $(Build.Repository.Name)
- displayName: Upload stage1 artifacts to source index
- env:
- BLOB_CONTAINER_URL: $(source-dot-net-stage1-blob-container-url)
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates/jobs/codeql-build.yml b/eng/common/templates/jobs/codeql-build.yml
index f7dc5ea4aaa63c..517f24d6a52ce7 100644
--- a/eng/common/templates/jobs/codeql-build.yml
+++ b/eng/common/templates/jobs/codeql-build.yml
@@ -1,31 +1,7 @@
-parameters:
- # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md
- continueOnError: false
- # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
- jobs: []
- # Optional: if specified, restore and use this version of Guardian instead of the default.
- overrideGuardianVersion: ''
-
jobs:
-- template: /eng/common/templates/jobs/jobs.yml
+- template: /eng/common/core-templates/jobs/codeql-build.yml
parameters:
- enableMicrobuild: false
- enablePublishBuildArtifacts: false
- enablePublishTestResults: false
- enablePublishBuildAssets: false
- enablePublishUsingPipelines: false
- enableTelemetry: true
+ is1ESPipeline: false
- variables:
- - group: Publish-Build-Assets
- # The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in
- # sync with the packages.config file.
- - name: DefaultGuardianVersion
- value: 0.109.0
- - name: GuardianPackagesConfigFile
- value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config
- - name: GuardianVersion
- value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }}
-
- jobs: ${{ parameters.jobs }}
-
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates/jobs/jobs.yml b/eng/common/templates/jobs/jobs.yml
index 289bb2396ce83e..388e9037b3e601 100644
--- a/eng/common/templates/jobs/jobs.yml
+++ b/eng/common/templates/jobs/jobs.yml
@@ -1,97 +1,7 @@
-parameters:
- # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md
- continueOnError: false
-
- # Optional: Include PublishBuildArtifacts task
- enablePublishBuildArtifacts: false
-
- # Optional: Enable publishing using release pipelines
- enablePublishUsingPipelines: false
-
- # Optional: Enable running the source-build jobs to build repo from source
- enableSourceBuild: false
-
- # Optional: Parameters for source-build template.
- # See /eng/common/templates/jobs/source-build.yml for options
- sourceBuildParameters: []
-
- graphFileGeneration:
- # Optional: Enable generating the graph files at the end of the build
- enabled: false
- # Optional: Include toolset dependencies in the generated graph files
- includeToolset: false
-
- # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
- jobs: []
-
- # Optional: Override automatically derived dependsOn value for "publish build assets" job
- publishBuildAssetsDependsOn: ''
-
- # Optional: Publish the assets as soon as the publish to BAR stage is complete, rather doing so in a separate stage.
- publishAssetsImmediately: false
-
- # Optional: If using publishAssetsImmediately and additional parameters are needed, can be used to send along additional parameters (normally sent to post-build.yml)
- artifactsPublishingAdditionalParameters: ''
- signingValidationAdditionalParameters: ''
-
- # Optional: should run as a public build even in the internal project
- # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
- runAsPublic: false
-
- enableSourceIndex: false
- sourceIndexParams: {}
-
-# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
-# and some (Microbuild) should only be applied to non-PR cases for internal builds.
-
jobs:
-- ${{ each job in parameters.jobs }}:
- - template: ../job/job.yml
- parameters:
- # pass along parameters
- ${{ each parameter in parameters }}:
- ${{ if ne(parameter.key, 'jobs') }}:
- ${{ parameter.key }}: ${{ parameter.value }}
-
- # pass along job properties
- ${{ each property in job }}:
- ${{ if ne(property.key, 'job') }}:
- ${{ property.key }}: ${{ property.value }}
-
- name: ${{ job.job }}
-
-- ${{ if eq(parameters.enableSourceBuild, true) }}:
- - template: /eng/common/templates/jobs/source-build.yml
- parameters:
- allCompletedJobId: Source_Build_Complete
- ${{ each parameter in parameters.sourceBuildParameters }}:
- ${{ parameter.key }}: ${{ parameter.value }}
-
-- ${{ if eq(parameters.enableSourceIndex, 'true') }}:
- - template: ../job/source-index-stage1.yml
- parameters:
- runAsPublic: ${{ parameters.runAsPublic }}
- ${{ each parameter in parameters.sourceIndexParams }}:
- ${{ parameter.key }}: ${{ parameter.value }}
-
-- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - ${{ if or(eq(parameters.enablePublishBuildAssets, true), eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, '')) }}:
- - template: ../job/publish-build-assets.yml
- parameters:
- continueOnError: ${{ parameters.continueOnError }}
- dependsOn:
- - ${{ if ne(parameters.publishBuildAssetsDependsOn, '') }}:
- - ${{ each job in parameters.publishBuildAssetsDependsOn }}:
- - ${{ job.job }}
- - ${{ if eq(parameters.publishBuildAssetsDependsOn, '') }}:
- - ${{ each job in parameters.jobs }}:
- - ${{ job.job }}
- - ${{ if eq(parameters.enableSourceBuild, true) }}:
- - Source_Build_Complete
+- template: /eng/common/core-templates/jobs/jobs.yml
+ parameters:
+ is1ESPipeline: false
- runAsPublic: ${{ parameters.runAsPublic }}
- publishUsingPipelines: ${{ parameters.enablePublishUsingPipelines }}
- publishAssetsImmediately: ${{ parameters.publishAssetsImmediately }}
- enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }}
- artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
- signingValidationAdditionalParameters: ${{ parameters.signingValidationAdditionalParameters }}
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates/jobs/source-build.yml b/eng/common/templates/jobs/source-build.yml
index da91481ff1d286..818d4c326dbbf1 100644
--- a/eng/common/templates/jobs/source-build.yml
+++ b/eng/common/templates/jobs/source-build.yml
@@ -1,46 +1,7 @@
-parameters:
- # This template adds arcade-powered source-build to CI. A job is created for each platform, as
- # well as an optional server job that completes when all platform jobs complete.
-
- # The name of the "join" job for all source-build platforms. If set to empty string, the job is
- # not included. Existing repo pipelines can use this job depend on all source-build jobs
- # completing without maintaining a separate list of every single job ID: just depend on this one
- # server job. By default, not included. Recommended name if used: 'Source_Build_Complete'.
- allCompletedJobId: ''
-
- # See /eng/common/templates/job/source-build.yml
- jobNamePrefix: 'Source_Build'
-
- # This is the default platform provided by Arcade, intended for use by a managed-only repo.
- defaultManagedPlatform:
- name: 'Managed'
- container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream9'
-
- # Defines the platforms on which to run build jobs. One job is created for each platform, and the
- # object in this array is sent to the job template as 'platform'. If no platforms are specified,
- # one job runs on 'defaultManagedPlatform'.
- platforms: []
-
jobs:
+- template: /eng/common/core-templates/jobs/source-build.yml
+ parameters:
+ is1ESPipeline: false
-- ${{ if ne(parameters.allCompletedJobId, '') }}:
- - job: ${{ parameters.allCompletedJobId }}
- displayName: Source-Build Complete
- pool: server
- dependsOn:
- - ${{ each platform in parameters.platforms }}:
- - ${{ parameters.jobNamePrefix }}_${{ platform.name }}
- - ${{ if eq(length(parameters.platforms), 0) }}:
- - ${{ parameters.jobNamePrefix }}_${{ parameters.defaultManagedPlatform.name }}
-
-- ${{ each platform in parameters.platforms }}:
- - template: /eng/common/templates/job/source-build.yml
- parameters:
- jobNamePrefix: ${{ parameters.jobNamePrefix }}
- platform: ${{ platform }}
-
-- ${{ if eq(length(parameters.platforms), 0) }}:
- - template: /eng/common/templates/job/source-build.yml
- parameters:
- jobNamePrefix: ${{ parameters.jobNamePrefix }}
- platform: ${{ parameters.defaultManagedPlatform }}
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
\ No newline at end of file
diff --git a/eng/common/templates/post-build/common-variables.yml b/eng/common/templates/post-build/common-variables.yml
index b9ede10bf099ae..7fa105875592c8 100644
--- a/eng/common/templates/post-build/common-variables.yml
+++ b/eng/common/templates/post-build/common-variables.yml
@@ -1,24 +1,8 @@
variables:
- - group: Publish-Build-Assets
+- template: /eng/common/core-templates/post-build/common-variables.yml
+ parameters:
+ # Specifies whether to use 1ES
+ is1ESPipeline: false
- # Whether the build is internal or not
- - name: IsInternalBuild
- value: ${{ and(ne(variables['System.TeamProject'], 'public'), contains(variables['Build.SourceBranch'], 'internal')) }}
-
- # Default Maestro++ API Endpoint and API Version
- - name: MaestroApiEndPoint
- value: "https://maestro.dot.net"
- - name: MaestroApiAccessToken
- value: $(MaestroAccessToken)
- - name: MaestroApiVersion
- value: "2020-02-20"
-
- - name: SourceLinkCLIVersion
- value: 3.0.0
- - name: SymbolToolVersion
- value: 1.0.1
- - name: BinlogToolVersion
- value: 1.0.11
-
- - name: runCodesignValidationInjection
- value: false
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
\ No newline at end of file
diff --git a/eng/common/templates/post-build/post-build.yml b/eng/common/templates/post-build/post-build.yml
index ee70e2b399c5a9..53ede714bdd207 100644
--- a/eng/common/templates/post-build/post-build.yml
+++ b/eng/common/templates/post-build/post-build.yml
@@ -1,282 +1,8 @@
-parameters:
- # Which publishing infra should be used. THIS SHOULD MATCH THE VERSION ON THE BUILD MANIFEST.
- # Publishing V1 is no longer supported
- # Publishing V2 is no longer supported
- # Publishing V3 is the default
- - name: publishingInfraVersion
- displayName: Which version of publishing should be used to promote the build definition?
- type: number
- default: 3
- values:
- - 3
-
- - name: BARBuildId
- displayName: BAR Build Id
- type: number
- default: 0
-
- - name: PromoteToChannelIds
- displayName: Channel to promote BARBuildId to
- type: string
- default: ''
-
- - name: enableSourceLinkValidation
- displayName: Enable SourceLink validation
- type: boolean
- default: false
-
- - name: enableSigningValidation
- displayName: Enable signing validation
- type: boolean
- default: true
-
- - name: enableSymbolValidation
- displayName: Enable symbol validation
- type: boolean
- default: false
-
- - name: enableNugetValidation
- displayName: Enable NuGet validation
- type: boolean
- default: true
-
- - name: publishInstallersAndChecksums
- displayName: Publish installers and checksums
- type: boolean
- default: true
-
- - name: SDLValidationParameters
- type: object
- default:
- enable: false
- publishGdn: false
- continueOnError: false
- params: ''
- artifactNames: ''
- downloadArtifacts: true
-
- # These parameters let the user customize the call to sdk-task.ps1 for publishing
- # symbols & general artifacts as well as for signing validation
- - name: symbolPublishingAdditionalParameters
- displayName: Symbol publishing additional parameters
- type: string
- default: ''
-
- - name: artifactsPublishingAdditionalParameters
- displayName: Artifact publishing additional parameters
- type: string
- default: ''
-
- - name: signingValidationAdditionalParameters
- displayName: Signing validation additional parameters
- type: string
- default: ''
-
- # Which stages should finish execution before post-build stages start
- - name: validateDependsOn
- type: object
- default:
- - build
-
- - name: publishDependsOn
- type: object
- default:
- - Validate
-
- # Optional: Call asset publishing rather than running in a separate stage
- - name: publishAssetsImmediately
- type: boolean
- default: false
-
stages:
-- ${{ if or(eq( parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}:
- - stage: Validate
- dependsOn: ${{ parameters.validateDependsOn }}
- displayName: Validate Build Assets
- variables:
- - template: common-variables.yml
- - template: /eng/common/templates/variables/pool-providers.yml
- jobs:
- - job:
- displayName: NuGet Validation
- condition: and(succeededOrFailed(), eq( ${{ parameters.enableNugetValidation }}, 'true'))
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: VSEngSS-MicroBuild2022-1ES
- demands: Cmd
- # If it's not devdiv, it's dnceng
- ${{ else }}:
- name: $(DncEngInternalBuildPool)
- demands: ImageOverride -equals windows.vs2019.amd64
-
- steps:
- - template: setup-maestro-vars.yml
- parameters:
- BARBuildId: ${{ parameters.BARBuildId }}
- PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
-
- - task: DownloadBuildArtifacts@0
- displayName: Download Package Artifacts
- inputs:
- buildType: specific
- buildVersionToDownload: specific
- project: $(AzDOProjectName)
- pipeline: $(AzDOPipelineId)
- buildId: $(AzDOBuildId)
- artifactName: PackageArtifacts
- checkDownloadedFiles: true
-
- - task: PowerShell@2
- displayName: Validate
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/nuget-validation.ps1
- arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/
- -ToolDestinationPath $(Agent.BuildDirectory)/Extract/
-
- - job:
- displayName: Signing Validation
- condition: and( eq( ${{ parameters.enableSigningValidation }}, 'true'), ne( variables['PostBuildSign'], 'true'))
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: VSEngSS-MicroBuild2022-1ES
- demands: Cmd
- # If it's not devdiv, it's dnceng
- ${{ else }}:
- name: $(DncEngInternalBuildPool)
- demands: ImageOverride -equals windows.vs2019.amd64
- steps:
- - template: setup-maestro-vars.yml
- parameters:
- BARBuildId: ${{ parameters.BARBuildId }}
- PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
-
- - task: DownloadBuildArtifacts@0
- displayName: Download Package Artifacts
- inputs:
- buildType: specific
- buildVersionToDownload: specific
- project: $(AzDOProjectName)
- pipeline: $(AzDOPipelineId)
- buildId: $(AzDOBuildId)
- artifactName: PackageArtifacts
- checkDownloadedFiles: true
- itemPattern: |
- **
- !**/Microsoft.SourceBuild.Intermediate.*.nupkg
-
- # This is necessary whenever we want to publish/restore to an AzDO private feed
- # Since sdk-task.ps1 tries to restore packages we need to do this authentication here
- # otherwise it'll complain about accessing a private feed.
- - task: NuGetAuthenticate@1
- displayName: 'Authenticate to AzDO Feeds'
-
- # Signing validation will optionally work with the buildmanifest file which is downloaded from
- # Azure DevOps above.
- - task: PowerShell@2
- displayName: Validate
- inputs:
- filePath: eng\common\sdk-task.ps1
- arguments: -task SigningValidation -restore -msbuildEngine vs
- /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts'
- /p:SignCheckExclusionsFile='$(Build.SourcesDirectory)/eng/SignCheckExclusionsFile.txt'
- ${{ parameters.signingValidationAdditionalParameters }}
-
- - template: ../steps/publish-logs.yml
- parameters:
- StageLabel: 'Validation'
- JobLabel: 'Signing'
- BinlogToolVersion: $(BinlogToolVersion)
-
- - job:
- displayName: SourceLink Validation
- condition: eq( ${{ parameters.enableSourceLinkValidation }}, 'true')
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: VSEngSS-MicroBuild2022-1ES
- demands: Cmd
- # If it's not devdiv, it's dnceng
- ${{ else }}:
- name: $(DncEngInternalBuildPool)
- demands: ImageOverride -equals windows.vs2019.amd64
- steps:
- - template: setup-maestro-vars.yml
- parameters:
- BARBuildId: ${{ parameters.BARBuildId }}
- PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
-
- - task: DownloadBuildArtifacts@0
- displayName: Download Blob Artifacts
- inputs:
- buildType: specific
- buildVersionToDownload: specific
- project: $(AzDOProjectName)
- pipeline: $(AzDOPipelineId)
- buildId: $(AzDOBuildId)
- artifactName: BlobArtifacts
- checkDownloadedFiles: true
-
- - task: PowerShell@2
- displayName: Validate
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/sourcelink-validation.ps1
- arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/
- -ExtractPath $(Agent.BuildDirectory)/Extract/
- -GHRepoName $(Build.Repository.Name)
- -GHCommit $(Build.SourceVersion)
- -SourcelinkCliVersion $(SourceLinkCLIVersion)
- continueOnError: true
-
- - template: /eng/common/templates/job/execute-sdl.yml
- parameters:
- enable: ${{ parameters.SDLValidationParameters.enable }}
- publishGuardianDirectoryToPipeline: ${{ parameters.SDLValidationParameters.publishGdn }}
- additionalParameters: ${{ parameters.SDLValidationParameters.params }}
- continueOnError: ${{ parameters.SDLValidationParameters.continueOnError }}
- artifactNames: ${{ parameters.SDLValidationParameters.artifactNames }}
- downloadArtifacts: ${{ parameters.SDLValidationParameters.downloadArtifacts }}
-
-- ${{ if ne(parameters.publishAssetsImmediately, 'true') }}:
- - stage: publish_using_darc
- ${{ if or(eq(parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}:
- dependsOn: ${{ parameters.publishDependsOn }}
- ${{ else }}:
- dependsOn: ${{ parameters.validateDependsOn }}
- displayName: Publish using Darc
- variables:
- - template: common-variables.yml
- - template: /eng/common/templates/variables/pool-providers.yml
- jobs:
- - job:
- displayName: Publish Using Darc
- timeoutInMinutes: 120
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: VSEngSS-MicroBuild2022-1ES
- demands: Cmd
- # If it's not devdiv, it's dnceng
- ${{ else }}:
- name: NetCore1ESPool-Publishing-Internal
- demands: ImageOverride -equals windows.vs2019.amd64
- steps:
- - template: setup-maestro-vars.yml
- parameters:
- BARBuildId: ${{ parameters.BARBuildId }}
- PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
-
- - task: NuGetAuthenticate@1
+- template: /eng/common/core-templates/post-build/post-build.yml
+ parameters:
+ # Specifies whether to use 1ES
+ is1ESPipeline: false
- - task: PowerShell@2
- displayName: Publish Using Darc
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1
- arguments: -BuildId $(BARBuildId)
- -PublishingInfraVersion ${{ parameters.publishingInfraVersion }}
- -AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)'
- -MaestroToken '$(MaestroApiAccessToken)'
- -WaitPublishingFinish true
- -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}'
- -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}'
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
\ No newline at end of file
diff --git a/eng/common/templates/post-build/setup-maestro-vars.yml b/eng/common/templates/post-build/setup-maestro-vars.yml
index 0c87f149a4ad77..a79fab5b441e84 100644
--- a/eng/common/templates/post-build/setup-maestro-vars.yml
+++ b/eng/common/templates/post-build/setup-maestro-vars.yml
@@ -1,70 +1,8 @@
-parameters:
- BARBuildId: ''
- PromoteToChannelIds: ''
-
steps:
- - ${{ if eq(coalesce(parameters.PromoteToChannelIds, 0), 0) }}:
- - task: DownloadBuildArtifacts@0
- displayName: Download Release Configs
- inputs:
- buildType: current
- artifactName: ReleaseConfigs
- checkDownloadedFiles: true
-
- - task: PowerShell@2
- name: setReleaseVars
- displayName: Set Release Configs Vars
- inputs:
- targetType: inline
- pwsh: true
- script: |
- try {
- if (!$Env:PromoteToMaestroChannels -or $Env:PromoteToMaestroChannels.Trim() -eq '') {
- $Content = Get-Content $(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt
-
- $BarId = $Content | Select -Index 0
- $Channels = $Content | Select -Index 1
- $IsStableBuild = $Content | Select -Index 2
-
- $AzureDevOpsProject = $Env:System_TeamProject
- $AzureDevOpsBuildDefinitionId = $Env:System_DefinitionId
- $AzureDevOpsBuildId = $Env:Build_BuildId
- }
- else {
- $buildApiEndpoint = "${Env:MaestroApiEndPoint}/api/builds/${Env:BARBuildId}?api-version=${Env:MaestroApiVersion}"
-
- $apiHeaders = New-Object 'System.Collections.Generic.Dictionary[[String],[String]]'
- $apiHeaders.Add('Accept', 'application/json')
- $apiHeaders.Add('Authorization',"Bearer ${Env:MAESTRO_API_TOKEN}")
-
- $buildInfo = try { Invoke-WebRequest -Method Get -Uri $buildApiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" }
-
- $BarId = $Env:BARBuildId
- $Channels = $Env:PromoteToMaestroChannels -split ","
- $Channels = $Channels -join "]["
- $Channels = "[$Channels]"
-
- $IsStableBuild = $buildInfo.stable
- $AzureDevOpsProject = $buildInfo.azureDevOpsProject
- $AzureDevOpsBuildDefinitionId = $buildInfo.azureDevOpsBuildDefinitionId
- $AzureDevOpsBuildId = $buildInfo.azureDevOpsBuildId
- }
-
- Write-Host "##vso[task.setvariable variable=BARBuildId]$BarId"
- Write-Host "##vso[task.setvariable variable=TargetChannels]$Channels"
- Write-Host "##vso[task.setvariable variable=IsStableBuild]$IsStableBuild"
+- template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
+ parameters:
+ # Specifies whether to use 1ES
+ is1ESPipeline: false
- Write-Host "##vso[task.setvariable variable=AzDOProjectName]$AzureDevOpsProject"
- Write-Host "##vso[task.setvariable variable=AzDOPipelineId]$AzureDevOpsBuildDefinitionId"
- Write-Host "##vso[task.setvariable variable=AzDOBuildId]$AzureDevOpsBuildId"
- }
- catch {
- Write-Host $_
- Write-Host $_.Exception
- Write-Host $_.ScriptStackTrace
- exit 1
- }
- env:
- MAESTRO_API_TOKEN: $(MaestroApiAccessToken)
- BARBuildId: ${{ parameters.BARBuildId }}
- PromoteToMaestroChannels: ${{ parameters.PromoteToChannelIds }}
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
\ No newline at end of file
diff --git a/eng/common/templates/steps/add-build-to-channel.yml b/eng/common/templates/steps/add-build-to-channel.yml
index f67a210d62f3e5..42bbba161b9b6a 100644
--- a/eng/common/templates/steps/add-build-to-channel.yml
+++ b/eng/common/templates/steps/add-build-to-channel.yml
@@ -1,13 +1,7 @@
-parameters:
- ChannelId: 0
-
steps:
-- task: PowerShell@2
- displayName: Add Build to Channel
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/add-build-to-channel.ps1
- arguments: -BuildId $(BARBuildId)
- -ChannelId ${{ parameters.ChannelId }}
- -MaestroApiAccessToken $(MaestroApiAccessToken)
- -MaestroApiEndPoint $(MaestroApiEndPoint)
- -MaestroApiVersion $(MaestroApiVersion)
+- template: /eng/common/core-templates/steps/add-build-to-channel.yml
+ parameters:
+ is1ESPipeline: false
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates/steps/build-reason.yml b/eng/common/templates/steps/build-reason.yml
deleted file mode 100644
index eba58109b52c9d..00000000000000
--- a/eng/common/templates/steps/build-reason.yml
+++ /dev/null
@@ -1,12 +0,0 @@
-# build-reason.yml
-# Description: runs steps if build.reason condition is valid. conditions is a string of valid build reasons
-# to include steps (',' separated).
-parameters:
- conditions: ''
- steps: []
-
-steps:
- - ${{ if and( not(startsWith(parameters.conditions, 'not')), contains(parameters.conditions, variables['build.reason'])) }}:
- - ${{ parameters.steps }}
- - ${{ if and( startsWith(parameters.conditions, 'not'), not(contains(parameters.conditions, variables['build.reason']))) }}:
- - ${{ parameters.steps }}
diff --git a/eng/common/templates/steps/component-governance.yml b/eng/common/templates/steps/component-governance.yml
index 0ecec47b0c9177..c12a5f8d21d765 100644
--- a/eng/common/templates/steps/component-governance.yml
+++ b/eng/common/templates/steps/component-governance.yml
@@ -1,13 +1,7 @@
-parameters:
- disableComponentGovernance: false
- componentGovernanceIgnoreDirectories: ''
-
steps:
-- ${{ if eq(parameters.disableComponentGovernance, 'true') }}:
- - script: "echo ##vso[task.setvariable variable=skipComponentGovernanceDetection]true"
- displayName: Set skipComponentGovernanceDetection variable
-- ${{ if ne(parameters.disableComponentGovernance, 'true') }}:
- - task: ComponentGovernanceComponentDetection@0
- continueOnError: true
- inputs:
- ignoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
\ No newline at end of file
+- template: /eng/common/core-templates/steps/component-governance.yml
+ parameters:
+ is1ESPipeline: false
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates/steps/execute-codeql.yml b/eng/common/templates/steps/execute-codeql.yml
deleted file mode 100644
index 3930b1630214b3..00000000000000
--- a/eng/common/templates/steps/execute-codeql.yml
+++ /dev/null
@@ -1,32 +0,0 @@
-parameters:
- # Language that should be analyzed. Defaults to csharp
- language: csharp
- # Build Commands
- buildCommands: ''
- overrideParameters: '' # Optional: to override values for parameters.
- additionalParameters: '' # Optional: parameters that need user specific values eg: '-SourceToolsList @("abc","def") -ArtifactToolsList @("ghi","jkl")'
- # Optional: if specified, restore and use this version of Guardian instead of the default.
- overrideGuardianVersion: ''
- # Optional: if true, publish the '.gdn' folder as a pipeline artifact. This can help with in-depth
- # diagnosis of problems with specific tool configurations.
- publishGuardianDirectoryToPipeline: false
- # The script to run to execute all SDL tools. Use this if you want to use a script to define SDL
- # parameters rather than relying on YAML. It may be better to use a local script, because you can
- # reproduce results locally without piecing together a command based on the YAML.
- executeAllSdlToolsScript: 'eng/common/sdl/execute-all-sdl-tools.ps1'
- # There is some sort of bug (has been reported) in Azure DevOps where if this parameter is named
- # 'continueOnError', the parameter value is not correctly picked up.
- # This can also be remedied by the caller (post-build.yml) if it does not use a nested parameter
- # optional: determines whether to continue the build if the step errors;
- sdlContinueOnError: false
-
-steps:
-- template: /eng/common/templates/steps/execute-sdl.yml
- parameters:
- overrideGuardianVersion: ${{ parameters.overrideGuardianVersion }}
- executeAllSdlToolsScript: ${{ parameters.executeAllSdlToolsScript }}
- overrideParameters: ${{ parameters.overrideParameters }}
- additionalParameters: '${{ parameters.additionalParameters }}
- -CodeQLAdditionalRunConfigParams @("BuildCommands < ${{ parameters.buildCommands }}", "Language < ${{ parameters.language }}")'
- publishGuardianDirectoryToPipeline: ${{ parameters.publishGuardianDirectoryToPipeline }}
- sdlContinueOnError: ${{ parameters.sdlContinueOnError }}
\ No newline at end of file
diff --git a/eng/common/templates/steps/execute-sdl.yml b/eng/common/templates/steps/execute-sdl.yml
deleted file mode 100644
index 07426fde05d824..00000000000000
--- a/eng/common/templates/steps/execute-sdl.yml
+++ /dev/null
@@ -1,88 +0,0 @@
-parameters:
- overrideGuardianVersion: ''
- executeAllSdlToolsScript: ''
- overrideParameters: ''
- additionalParameters: ''
- publishGuardianDirectoryToPipeline: false
- sdlContinueOnError: false
- condition: ''
-
-steps:
-- task: NuGetAuthenticate@1
- inputs:
- nuGetServiceConnections: GuardianConnect
-
-- task: NuGetToolInstaller@1
- displayName: 'Install NuGet.exe'
-
-- ${{ if ne(parameters.overrideGuardianVersion, '') }}:
- - pwsh: |
- Set-Location -Path $(Build.SourcesDirectory)\eng\common\sdl
- . .\sdl.ps1
- $guardianCliLocation = Install-Gdn -Path $(Build.SourcesDirectory)\.artifacts -Version ${{ parameters.overrideGuardianVersion }}
- Write-Host "##vso[task.setvariable variable=GuardianCliLocation]$guardianCliLocation"
- displayName: Install Guardian (Overridden)
-
-- ${{ if eq(parameters.overrideGuardianVersion, '') }}:
- - pwsh: |
- Set-Location -Path $(Build.SourcesDirectory)\eng\common\sdl
- . .\sdl.ps1
- $guardianCliLocation = Install-Gdn -Path $(Build.SourcesDirectory)\.artifacts
- Write-Host "##vso[task.setvariable variable=GuardianCliLocation]$guardianCliLocation"
- displayName: Install Guardian
-
-- ${{ if ne(parameters.overrideParameters, '') }}:
- - powershell: ${{ parameters.executeAllSdlToolsScript }} ${{ parameters.overrideParameters }}
- displayName: Execute SDL (Overridden)
- continueOnError: ${{ parameters.sdlContinueOnError }}
- condition: ${{ parameters.condition }}
-
-- ${{ if eq(parameters.overrideParameters, '') }}:
- - powershell: ${{ parameters.executeAllSdlToolsScript }}
- -GuardianCliLocation $(GuardianCliLocation)
- -NugetPackageDirectory $(Build.SourcesDirectory)\.packages
- -AzureDevOpsAccessToken $(dn-bot-dotnet-build-rw-code-rw)
- ${{ parameters.additionalParameters }}
- displayName: Execute SDL
- continueOnError: ${{ parameters.sdlContinueOnError }}
- condition: ${{ parameters.condition }}
-
-- ${{ if ne(parameters.publishGuardianDirectoryToPipeline, 'false') }}:
- # We want to publish the Guardian results and configuration for easy diagnosis. However, the
- # '.gdn' dir is a mix of configuration, results, extracted dependencies, and Guardian default
- # tooling files. Some of these files are large and aren't useful during an investigation, so
- # exclude them by simply deleting them before publishing. (As of writing, there is no documented
- # way to selectively exclude a dir from the pipeline artifact publish task.)
- - task: DeleteFiles@1
- displayName: Delete Guardian dependencies to avoid uploading
- inputs:
- SourceFolder: $(Agent.BuildDirectory)/.gdn
- Contents: |
- c
- i
- condition: succeededOrFailed()
-
- - publish: $(Agent.BuildDirectory)/.gdn
- artifact: GuardianConfiguration
- displayName: Publish GuardianConfiguration
- condition: succeededOrFailed()
-
- # Publish the SARIF files in a container named CodeAnalysisLogs to enable integration
- # with the "SARIF SAST Scans Tab" Azure DevOps extension
- - task: CopyFiles@2
- displayName: Copy SARIF files
- inputs:
- flattenFolders: true
- sourceFolder: $(Agent.BuildDirectory)/.gdn/rc/
- contents: '**/*.sarif'
- targetFolder: $(Build.SourcesDirectory)/CodeAnalysisLogs
- condition: succeededOrFailed()
-
- # Use PublishBuildArtifacts because the SARIF extension only checks this case
- # see microsoft/sarif-azuredevops-extension#4
- - task: PublishBuildArtifacts@1
- displayName: Publish SARIF files to CodeAnalysisLogs container
- inputs:
- pathToPublish: $(Build.SourcesDirectory)/CodeAnalysisLogs
- artifactName: CodeAnalysisLogs
- condition: succeededOrFailed()
\ No newline at end of file
diff --git a/eng/common/templates/steps/generate-sbom.yml b/eng/common/templates/steps/generate-sbom.yml
index a06373f38fa5d5..26dc00a2e0f31e 100644
--- a/eng/common/templates/steps/generate-sbom.yml
+++ b/eng/common/templates/steps/generate-sbom.yml
@@ -1,48 +1,7 @@
-# BuildDropPath - The root folder of the drop directory for which the manifest file will be generated.
-# PackageName - The name of the package this SBOM represents.
-# PackageVersion - The version of the package this SBOM represents.
-# ManifestDirPath - The path of the directory where the generated manifest files will be placed
-# IgnoreDirectories - Directories to ignore for SBOM generation. This will be passed through to the CG component detector.
-
-parameters:
- PackageVersion: 7.0.0
- BuildDropPath: '$(Build.SourcesDirectory)/artifacts'
- PackageName: '.NET'
- ManifestDirPath: $(Build.ArtifactStagingDirectory)/sbom
- IgnoreDirectories: ''
- sbomContinueOnError: true
-
steps:
-- task: PowerShell@2
- displayName: Prep for SBOM generation in (Non-linux)
- condition: or(eq(variables['Agent.Os'], 'Windows_NT'), eq(variables['Agent.Os'], 'Darwin'))
- inputs:
- filePath: ./eng/common/generate-sbom-prep.ps1
- arguments: ${{parameters.manifestDirPath}}
-
-# Chmodding is a workaround for https://github.com/dotnet/arcade/issues/8461
-- script: |
- chmod +x ./eng/common/generate-sbom-prep.sh
- ./eng/common/generate-sbom-prep.sh ${{parameters.manifestDirPath}}
- displayName: Prep for SBOM generation in (Linux)
- condition: eq(variables['Agent.Os'], 'Linux')
- continueOnError: ${{ parameters.sbomContinueOnError }}
-
-- task: AzureArtifacts.manifest-generator-task.manifest-generator-task.ManifestGeneratorTask@0
- displayName: 'Generate SBOM manifest'
- continueOnError: ${{ parameters.sbomContinueOnError }}
- inputs:
- PackageName: ${{ parameters.packageName }}
- BuildDropPath: ${{ parameters.buildDropPath }}
- PackageVersion: ${{ parameters.packageVersion }}
- ManifestDirPath: ${{ parameters.manifestDirPath }}
- ${{ if ne(parameters.IgnoreDirectories, '') }}:
- AdditionalComponentDetectorArgs: '--IgnoreDirectories ${{ parameters.IgnoreDirectories }}'
-
-- task: PublishPipelineArtifact@1
- displayName: Publish SBOM manifest
- continueOnError: ${{parameters.sbomContinueOnError}}
- inputs:
- targetPath: '${{parameters.manifestDirPath}}'
- artifactName: $(ARTIFACT_NAME)
+- template: /eng/common/core-templates/steps/generate-sbom.yml
+ parameters:
+ is1ESPipeline: false
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates/steps/publish-build-artifacts.yml b/eng/common/templates/steps/publish-build-artifacts.yml
new file mode 100644
index 00000000000000..6428a98dfef68e
--- /dev/null
+++ b/eng/common/templates/steps/publish-build-artifacts.yml
@@ -0,0 +1,40 @@
+parameters:
+- name: is1ESPipeline
+ type: boolean
+ default: false
+
+- name: displayName
+ type: string
+ default: 'Publish to Build Artifact'
+
+- name: condition
+ type: string
+ default: succeeded()
+
+- name: artifactName
+ type: string
+
+- name: pathToPublish
+ type: string
+
+- name: continueOnError
+ type: boolean
+ default: false
+
+- name: publishLocation
+ type: string
+ default: 'Container'
+
+steps:
+- ${{ if eq(parameters.is1ESPipeline, true) }}:
+ - 'eng/common/templates cannot be referenced from a 1ES managed template': error
+- task: PublishBuildArtifacts@1
+ displayName: ${{ parameters.displayName }}
+ condition: ${{ parameters.condition }}
+ ${{ if parameters.continueOnError }}:
+ continueOnError: ${{ parameters.continueOnError }}
+ inputs:
+ PublishLocation: ${{ parameters.publishLocation }}
+ PathtoPublish: ${{ parameters.pathToPublish }}
+ ${{ if parameters.artifactName }}:
+ ArtifactName: ${{ parameters.artifactName }}
\ No newline at end of file
diff --git a/eng/common/templates/steps/publish-logs.yml b/eng/common/templates/steps/publish-logs.yml
index 80861297ddc074..4ea86bd8823555 100644
--- a/eng/common/templates/steps/publish-logs.yml
+++ b/eng/common/templates/steps/publish-logs.yml
@@ -1,49 +1,7 @@
-parameters:
- StageLabel: ''
- JobLabel: ''
- CustomSensitiveDataList: ''
- # A default - in case value from eng/common/templates/post-build/common-variables.yml is not passed
- BinlogToolVersion: '1.0.11'
-
steps:
-- task: Powershell@2
- displayName: Prepare Binlogs to Upload
- inputs:
- targetType: inline
- script: |
- New-Item -ItemType Directory $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
- Move-Item -Path $(Build.SourcesDirectory)/artifacts/log/Debug/* $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
- continueOnError: true
- condition: always()
-
-- task: PowerShell@2
- displayName: Redact Logs
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/redact-logs.ps1
- # For now this needs to have explicit list of all sensitive data. Taken from eng/publishing/v3/publish.yml
- # Sensitive data can as well be added to $(Build.SourcesDirectory)/eng/BinlogSecretsRedactionFile.txt'
- # If the file exists - sensitive data for redaction will be sourced from it
- # (single entry per line, lines starting with '# ' are considered comments and skipped)
- arguments: -InputPath '$(Build.SourcesDirectory)/PostBuildLogs'
- -BinlogToolVersion ${{parameters.BinlogToolVersion}}
- -TokensFilePath '$(Build.SourcesDirectory)/eng/BinlogSecretsRedactionFile.txt'
- '$(publishing-dnceng-devdiv-code-r-build-re)'
- '$(MaestroAccessToken)'
- '$(dn-bot-all-orgs-artifact-feeds-rw)'
- '$(akams-client-id)'
- '$(akams-client-secret)'
- '$(microsoft-symbol-server-pat)'
- '$(symweb-symbol-server-pat)'
- '$(dn-bot-all-orgs-build-rw-code-rw)'
- ${{parameters.CustomSensitiveDataList}}
- continueOnError: true
- condition: always()
-
-- task: PublishBuildArtifacts@1
- displayName: Publish Logs
- inputs:
- PathtoPublish: '$(Build.SourcesDirectory)/PostBuildLogs'
- PublishLocation: Container
- ArtifactName: PostBuildLogs
- continueOnError: true
- condition: always()
+- template: /eng/common/core-templates/steps/publish-logs.yml
+ parameters:
+ is1ESPipeline: false
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates/steps/publish-pipeline-artifacts.yml b/eng/common/templates/steps/publish-pipeline-artifacts.yml
new file mode 100644
index 00000000000000..5dd698b212fc6b
--- /dev/null
+++ b/eng/common/templates/steps/publish-pipeline-artifacts.yml
@@ -0,0 +1,34 @@
+parameters:
+- name: is1ESPipeline
+ type: boolean
+ default: false
+
+- name: args
+ type: object
+ default: {}
+
+steps:
+- ${{ if eq(parameters.is1ESPipeline, true) }}:
+ - 'eng/common/templates cannot be referenced from a 1ES managed template': error
+- task: PublishPipelineArtifact@1
+ displayName: ${{ coalesce(parameters.args.displayName, 'Publish to Build Artifact') }}
+ ${{ if parameters.args.condition }}:
+ condition: ${{ parameters.args.condition }}
+ ${{ else }}:
+ condition: succeeded()
+ ${{ if parameters.args.continueOnError }}:
+ continueOnError: ${{ parameters.args.continueOnError }}
+ inputs:
+ targetPath: ${{ parameters.args.targetPath }}
+ ${{ if parameters.args.artifactName }}:
+ artifactName: ${{ parameters.args.artifactName }}
+ ${{ if parameters.args.publishLocation }}:
+ publishLocation: ${{ parameters.args.publishLocation }}
+ ${{ if parameters.args.fileSharePath }}:
+ fileSharePath: ${{ parameters.args.fileSharePath }}
+ ${{ if parameters.args.Parallel }}:
+ parallel: ${{ parameters.args.Parallel }}
+ ${{ if parameters.args.parallelCount }}:
+ parallelCount: ${{ parameters.args.parallelCount }}
+ ${{ if parameters.args.properties }}:
+ properties: ${{ parameters.args.properties }}
\ No newline at end of file
diff --git a/eng/common/templates/steps/retain-build.yml b/eng/common/templates/steps/retain-build.yml
index 83d97a26a01ff9..8e841ace3d293f 100644
--- a/eng/common/templates/steps/retain-build.yml
+++ b/eng/common/templates/steps/retain-build.yml
@@ -1,28 +1,7 @@
-parameters:
- # Optional azure devops PAT with build execute permissions for the build's organization,
- # only needed if the build that should be retained ran on a different organization than
- # the pipeline where this template is executing from
- Token: ''
- # Optional BuildId to retain, defaults to the current running build
- BuildId: ''
- # Azure devops Organization URI for the build in the https://dev.azure.com/ format.
- # Defaults to the organization the current pipeline is running on
- AzdoOrgUri: '$(System.CollectionUri)'
- # Azure devops project for the build. Defaults to the project the current pipeline is running on
- AzdoProject: '$(System.TeamProject)'
-
steps:
- - task: powershell@2
- inputs:
- targetType: 'filePath'
- filePath: eng/common/retain-build.ps1
- pwsh: true
- arguments: >
- -AzdoOrgUri: ${{parameters.AzdoOrgUri}}
- -AzdoProject ${{parameters.AzdoProject}}
- -Token ${{coalesce(parameters.Token, '$env:SYSTEM_ACCESSTOKEN') }}
- -BuildId ${{coalesce(parameters.BuildId, '$env:BUILD_ID')}}
- displayName: Enable permanent build retention
- env:
- SYSTEM_ACCESSTOKEN: $(System.AccessToken)
- BUILD_ID: $(Build.BuildId)
\ No newline at end of file
+- template: /eng/common/core-templates/steps/retain-build.yml
+ parameters:
+ is1ESPipeline: false
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates/steps/run-on-unix.yml b/eng/common/templates/steps/run-on-unix.yml
deleted file mode 100644
index e1733814f65dcc..00000000000000
--- a/eng/common/templates/steps/run-on-unix.yml
+++ /dev/null
@@ -1,7 +0,0 @@
-parameters:
- agentOs: ''
- steps: []
-
-steps:
-- ${{ if ne(parameters.agentOs, 'Windows_NT') }}:
- - ${{ parameters.steps }}
diff --git a/eng/common/templates/steps/run-on-windows.yml b/eng/common/templates/steps/run-on-windows.yml
deleted file mode 100644
index 73e7e9c275a1f1..00000000000000
--- a/eng/common/templates/steps/run-on-windows.yml
+++ /dev/null
@@ -1,7 +0,0 @@
-parameters:
- agentOs: ''
- steps: []
-
-steps:
-- ${{ if eq(parameters.agentOs, 'Windows_NT') }}:
- - ${{ parameters.steps }}
diff --git a/eng/common/templates/steps/run-script-ifequalelse.yml b/eng/common/templates/steps/run-script-ifequalelse.yml
deleted file mode 100644
index 3d1242f5587c82..00000000000000
--- a/eng/common/templates/steps/run-script-ifequalelse.yml
+++ /dev/null
@@ -1,33 +0,0 @@
-parameters:
- # if parameter1 equals parameter 2, run 'ifScript' command, else run 'elsescript' command
- parameter1: ''
- parameter2: ''
- ifScript: ''
- elseScript: ''
-
- # name of script step
- name: Script
-
- # display name of script step
- displayName: If-Equal-Else Script
-
- # environment
- env: {}
-
- # conditional expression for step execution
- condition: ''
-
-steps:
-- ${{ if and(ne(parameters.ifScript, ''), eq(parameters.parameter1, parameters.parameter2)) }}:
- - script: ${{ parameters.ifScript }}
- name: ${{ parameters.name }}
- displayName: ${{ parameters.displayName }}
- env: ${{ parameters.env }}
- condition: ${{ parameters.condition }}
-
-- ${{ if and(ne(parameters.elseScript, ''), ne(parameters.parameter1, parameters.parameter2)) }}:
- - script: ${{ parameters.elseScript }}
- name: ${{ parameters.name }}
- displayName: ${{ parameters.displayName }}
- env: ${{ parameters.env }}
- condition: ${{ parameters.condition }}
\ No newline at end of file
diff --git a/eng/common/templates/steps/send-to-helix.yml b/eng/common/templates/steps/send-to-helix.yml
index 68fa739c4ab215..39f99fc2762d01 100644
--- a/eng/common/templates/steps/send-to-helix.yml
+++ b/eng/common/templates/steps/send-to-helix.yml
@@ -1,93 +1,7 @@
-# Please remember to update the documentation if you make changes to these parameters!
-parameters:
- HelixSource: 'pr/default' # required -- sources must start with pr/, official/, prodcon/, or agent/
- HelixType: 'tests/default/' # required -- Helix telemetry which identifies what type of data this is; should include "test" for clarity and must end in '/'
- HelixBuild: $(Build.BuildNumber) # required -- the build number Helix will use to identify this -- automatically set to the AzDO build number
- HelixTargetQueues: '' # required -- semicolon-delimited list of Helix queues to test on; see https://helix.dot.net/ for a list of queues
- HelixAccessToken: '' # required -- access token to make Helix API requests; should be provided by the appropriate variable group
- HelixProjectPath: 'eng/common/helixpublish.proj' # optional -- path to the project file to build relative to BUILD_SOURCESDIRECTORY
- HelixProjectArguments: '' # optional -- arguments passed to the build command
- HelixConfiguration: '' # optional -- additional property attached to a job
- HelixPreCommands: '' # optional -- commands to run before Helix work item execution
- HelixPostCommands: '' # optional -- commands to run after Helix work item execution
- WorkItemDirectory: '' # optional -- a payload directory to zip up and send to Helix; requires WorkItemCommand; incompatible with XUnitProjects
- WorkItemCommand: '' # optional -- a command to execute on the payload; requires WorkItemDirectory; incompatible with XUnitProjects
- WorkItemTimeout: '' # optional -- a timeout in TimeSpan.Parse-ready value (e.g. 00:02:00) for the work item command; requires WorkItemDirectory; incompatible with XUnitProjects
- CorrelationPayloadDirectory: '' # optional -- a directory to zip up and send to Helix as a correlation payload
- XUnitProjects: '' # optional -- semicolon-delimited list of XUnitProjects to parse and send to Helix; requires XUnitRuntimeTargetFramework, XUnitPublishTargetFramework, XUnitRunnerVersion, and IncludeDotNetCli=true
- XUnitWorkItemTimeout: '' # optional -- the workitem timeout in seconds for all workitems created from the xUnit projects specified by XUnitProjects
- XUnitPublishTargetFramework: '' # optional -- framework to use to publish your xUnit projects
- XUnitRuntimeTargetFramework: '' # optional -- framework to use for the xUnit console runner
- XUnitRunnerVersion: '' # optional -- version of the xUnit nuget package you wish to use on Helix; required for XUnitProjects
- IncludeDotNetCli: false # optional -- true will download a version of the .NET CLI onto the Helix machine as a correlation payload; requires DotNetCliPackageType and DotNetCliVersion
- DotNetCliPackageType: '' # optional -- either 'sdk', 'runtime' or 'aspnetcore-runtime'; determines whether the sdk or runtime will be sent to Helix; see https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json
- DotNetCliVersion: '' # optional -- version of the CLI to send to Helix; based on this: https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json
- WaitForWorkItemCompletion: true # optional -- true will make the task wait until work items have been completed and fail the build if work items fail. False is "fire and forget."
- IsExternal: false # [DEPRECATED] -- doesn't do anything, jobs are external if HelixAccessToken is empty and Creator is set
- HelixBaseUri: 'https://helix.dot.net/' # optional -- sets the Helix API base URI (allows targeting https://helix.int-dot.net )
- Creator: '' # optional -- if the build is external, use this to specify who is sending the job
- DisplayNamePrefix: 'Run Tests' # optional -- rename the beginning of the displayName of the steps in AzDO
- condition: succeeded() # optional -- condition for step to execute; defaults to succeeded()
- continueOnError: false # optional -- determines whether to continue the build if the step errors; defaults to false
-
steps:
- - powershell: 'powershell "$env:BUILD_SOURCESDIRECTORY\eng\common\msbuild.ps1 $env:BUILD_SOURCESDIRECTORY/${{ parameters.HelixProjectPath }} /restore /p:TreatWarningsAsErrors=false ${{ parameters.HelixProjectArguments }} /t:Test /bl:$env:BUILD_SOURCESDIRECTORY\artifacts\log\$env:BuildConfig\SendToHelix.binlog"'
- displayName: ${{ parameters.DisplayNamePrefix }} (Windows)
- env:
- BuildConfig: $(_BuildConfig)
- HelixSource: ${{ parameters.HelixSource }}
- HelixType: ${{ parameters.HelixType }}
- HelixBuild: ${{ parameters.HelixBuild }}
- HelixConfiguration: ${{ parameters.HelixConfiguration }}
- HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
- HelixAccessToken: ${{ parameters.HelixAccessToken }}
- HelixPreCommands: ${{ parameters.HelixPreCommands }}
- HelixPostCommands: ${{ parameters.HelixPostCommands }}
- WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
- WorkItemCommand: ${{ parameters.WorkItemCommand }}
- WorkItemTimeout: ${{ parameters.WorkItemTimeout }}
- CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
- XUnitProjects: ${{ parameters.XUnitProjects }}
- XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }}
- XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }}
- XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }}
- XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }}
- IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
- DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
- DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
- WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
- HelixBaseUri: ${{ parameters.HelixBaseUri }}
- Creator: ${{ parameters.Creator }}
- SYSTEM_ACCESSTOKEN: $(System.AccessToken)
- condition: and(${{ parameters.condition }}, eq(variables['Agent.Os'], 'Windows_NT'))
- continueOnError: ${{ parameters.continueOnError }}
- - script: $BUILD_SOURCESDIRECTORY/eng/common/msbuild.sh $BUILD_SOURCESDIRECTORY/${{ parameters.HelixProjectPath }} /restore /p:TreatWarningsAsErrors=false ${{ parameters.HelixProjectArguments }} /t:Test /bl:$BUILD_SOURCESDIRECTORY/artifacts/log/$BuildConfig/SendToHelix.binlog
- displayName: ${{ parameters.DisplayNamePrefix }} (Unix)
- env:
- BuildConfig: $(_BuildConfig)
- HelixSource: ${{ parameters.HelixSource }}
- HelixType: ${{ parameters.HelixType }}
- HelixBuild: ${{ parameters.HelixBuild }}
- HelixConfiguration: ${{ parameters.HelixConfiguration }}
- HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
- HelixAccessToken: ${{ parameters.HelixAccessToken }}
- HelixPreCommands: ${{ parameters.HelixPreCommands }}
- HelixPostCommands: ${{ parameters.HelixPostCommands }}
- WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
- WorkItemCommand: ${{ parameters.WorkItemCommand }}
- WorkItemTimeout: ${{ parameters.WorkItemTimeout }}
- CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
- XUnitProjects: ${{ parameters.XUnitProjects }}
- XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }}
- XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }}
- XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }}
- XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }}
- IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
- DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
- DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
- WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
- HelixBaseUri: ${{ parameters.HelixBaseUri }}
- Creator: ${{ parameters.Creator }}
- SYSTEM_ACCESSTOKEN: $(System.AccessToken)
- condition: and(${{ parameters.condition }}, ne(variables['Agent.Os'], 'Windows_NT'))
- continueOnError: ${{ parameters.continueOnError }}
+- template: /eng/common/core-templates/steps/send-to-helix.yml
+ parameters:
+ is1ESPipeline: false
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates/steps/source-build.yml b/eng/common/templates/steps/source-build.yml
index 32738aa938013e..23c1d6f4e9f8d4 100644
--- a/eng/common/templates/steps/source-build.yml
+++ b/eng/common/templates/steps/source-build.yml
@@ -1,131 +1,7 @@
-parameters:
- # This template adds arcade-powered source-build to CI.
-
- # This is a 'steps' template, and is intended for advanced scenarios where the existing build
- # infra has a careful build methodology that must be followed. For example, a repo
- # (dotnet/runtime) might choose to clone the GitHub repo only once and store it as a pipeline
- # artifact for all subsequent jobs to use, to reduce dependence on a strong network connection to
- # GitHub. Using this steps template leaves room for that infra to be included.
-
- # Defines the platform on which to run the steps. See 'eng/common/templates/job/source-build.yml'
- # for details. The entire object is described in the 'job' template for simplicity, even though
- # the usage of the properties on this object is split between the 'job' and 'steps' templates.
- platform: {}
-
steps:
-# Build. Keep it self-contained for simple reusability. (No source-build-specific job variables.)
-- script: |
- set -x
- df -h
-
- # If building on the internal project, the artifact feeds variable may be available (usually only if needed)
- # In that case, call the feed setup script to add internal feeds corresponding to public ones.
- # In addition, add an msbuild argument to copy the WIP from the repo to the target build location.
- # This is because SetupNuGetSources.sh will alter the current NuGet.config file, and we need to preserve those
- # changes.
- internalRestoreArgs=
- if [ '$(dn-bot-dnceng-artifact-feeds-rw)' != '$''(dn-bot-dnceng-artifact-feeds-rw)' ]; then
- # Temporarily work around https://github.com/dotnet/arcade/issues/7709
- chmod +x $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh
- $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh $(Build.SourcesDirectory)/NuGet.config $(dn-bot-dnceng-artifact-feeds-rw)
- internalRestoreArgs='/p:CopyWipIntoInnerSourceBuildRepo=true'
-
- # The 'Copy WIP' feature of source build uses git stash to apply changes from the original repo.
- # This only works if there is a username/email configured, which won't be the case in most CI runs.
- git config --get user.email
- if [ $? -ne 0 ]; then
- git config user.email dn-bot@microsoft.com
- git config user.name dn-bot
- fi
- fi
-
- # If building on the internal project, the internal storage variable may be available (usually only if needed)
- # In that case, add variables to allow the download of internal runtimes if the specified versions are not found
- # in the default public locations.
- internalRuntimeDownloadArgs=
- if [ '$(dotnetbuilds-internal-container-read-token-base64)' != '$''(dotnetbuilds-internal-container-read-token-base64)' ]; then
- internalRuntimeDownloadArgs='/p:DotNetRuntimeSourceFeed=https://dotnetbuilds.blob.core.windows.net/internal /p:DotNetRuntimeSourceFeedKey=$(dotnetbuilds-internal-container-read-token-base64) --runtimesourcefeed https://dotnetbuilds.blob.core.windows.net/internal --runtimesourcefeedkey $(dotnetbuilds-internal-container-read-token-base64)'
- fi
-
- buildConfig=Release
- # Check if AzDO substitutes in a build config from a variable, and use it if so.
- if [ '$(_BuildConfig)' != '$''(_BuildConfig)' ]; then
- buildConfig='$(_BuildConfig)'
- fi
-
- officialBuildArgs=
- if [ '${{ and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}' = 'True' ]; then
- officialBuildArgs='/p:DotNetPublishUsingPipelines=true /p:OfficialBuildId=$(BUILD.BUILDNUMBER)'
- fi
-
- targetRidArgs=
- if [ '${{ parameters.platform.targetRID }}' != '' ]; then
- targetRidArgs='/p:TargetRid=${{ parameters.platform.targetRID }}'
- fi
-
- runtimeOsArgs=
- if [ '${{ parameters.platform.runtimeOS }}' != '' ]; then
- runtimeOsArgs='/p:RuntimeOS=${{ parameters.platform.runtimeOS }}'
- fi
-
- baseOsArgs=
- if [ '${{ parameters.platform.baseOS }}' != '' ]; then
- baseOsArgs='/p:BaseOS=${{ parameters.platform.baseOS }}'
- fi
-
- publishArgs=
- if [ '${{ parameters.platform.skipPublishValidation }}' != 'true' ]; then
- publishArgs='--publish'
- fi
-
- assetManifestFileName=SourceBuild_RidSpecific.xml
- if [ '${{ parameters.platform.name }}' != '' ]; then
- assetManifestFileName=SourceBuild_${{ parameters.platform.name }}.xml
- fi
-
- ${{ coalesce(parameters.platform.buildScript, './build.sh') }} --ci \
- --configuration $buildConfig \
- --restore --build --pack $publishArgs -bl \
- $officialBuildArgs \
- $internalRuntimeDownloadArgs \
- $internalRestoreArgs \
- $targetRidArgs \
- $runtimeOsArgs \
- $baseOsArgs \
- /p:SourceBuildNonPortable=${{ parameters.platform.nonPortable }} \
- /p:ArcadeBuildFromSource=true \
- /p:DotNetBuildSourceOnly=true \
- /p:DotNetBuildRepo=true \
- /p:AssetManifestFileName=$assetManifestFileName
- displayName: Build
-
-# Upload build logs for diagnosis.
-- task: CopyFiles@2
- displayName: Prepare BuildLogs staging directory
- inputs:
- SourceFolder: '$(Build.SourcesDirectory)'
- Contents: |
- **/*.log
- **/*.binlog
- artifacts/sb/prebuilt-report/**
- TargetFolder: '$(Build.StagingDirectory)/BuildLogs'
- CleanTargetFolder: true
- continueOnError: true
- condition: succeededOrFailed()
-
-- task: PublishPipelineArtifact@1
- displayName: Publish BuildLogs
- inputs:
- targetPath: '$(Build.StagingDirectory)/BuildLogs'
- artifactName: BuildLogs_SourceBuild_${{ parameters.platform.name }}_Attempt$(System.JobAttempt)
- continueOnError: true
- condition: succeededOrFailed()
+- template: /eng/common/core-templates/steps/source-build.yml
+ parameters:
+ is1ESPipeline: false
-# Manually inject component detection so that we can ignore the source build upstream cache, which contains
-# a nupkg cache of input packages (a local feed).
-# This path must match the upstream cache path in property 'CurrentRepoSourceBuiltNupkgCacheDir'
-# in src\Microsoft.DotNet.Arcade.Sdk\tools\SourceBuild\SourceBuildArcade.targets
-- task: ComponentGovernanceComponentDetection@0
- displayName: Component Detection (Exclude upstream cache)
- inputs:
- ignoreDirectories: '$(Build.SourcesDirectory)/artifacts/sb/src/artifacts/obj/source-built-upstream-cache'
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates/steps/telemetry-end.yml b/eng/common/templates/steps/telemetry-end.yml
deleted file mode 100644
index fadc04ca1b9a3e..00000000000000
--- a/eng/common/templates/steps/telemetry-end.yml
+++ /dev/null
@@ -1,102 +0,0 @@
-parameters:
- maxRetries: 5
- retryDelay: 10 # in seconds
-
-steps:
-- bash: |
- if [ "$AGENT_JOBSTATUS" = "Succeeded" ] || [ "$AGENT_JOBSTATUS" = "PartiallySucceeded" ]; then
- errorCount=0
- else
- errorCount=1
- fi
- warningCount=0
-
- curlStatus=1
- retryCount=0
- # retry loop to harden against spotty telemetry connections
- # we don't retry successes and 4xx client errors
- until [[ $curlStatus -eq 0 || ( $curlStatus -ge 400 && $curlStatus -le 499 ) || $retryCount -ge $MaxRetries ]]
- do
- if [ $retryCount -gt 0 ]; then
- echo "Failed to send telemetry to Helix; waiting $RetryDelay seconds before retrying..."
- sleep $RetryDelay
- fi
-
- # create a temporary file for curl output
- res=`mktemp`
-
- curlResult=`
- curl --verbose --output $res --write-out "%{http_code}"\
- -H 'Content-Type: application/json' \
- -H "X-Helix-Job-Token: $Helix_JobToken" \
- -H 'Content-Length: 0' \
- -X POST -G "https://helix.dot.net/api/2018-03-14/telemetry/job/build/$Helix_WorkItemId/finish" \
- --data-urlencode "errorCount=$errorCount" \
- --data-urlencode "warningCount=$warningCount"`
- curlStatus=$?
-
- if [ $curlStatus -eq 0 ]; then
- if [ $curlResult -gt 299 ] || [ $curlResult -lt 200 ]; then
- curlStatus=$curlResult
- fi
- fi
-
- let retryCount++
- done
-
- if [ $curlStatus -ne 0 ]; then
- echo "Failed to Send Build Finish information after $retryCount retries"
- vstsLogOutput="vso[task.logissue type=error;sourcepath=templates/steps/telemetry-end.yml;code=1;]Failed to Send Build Finish information: $curlStatus"
- echo "##$vstsLogOutput"
- exit 1
- fi
- displayName: Send Unix Build End Telemetry
- env:
- # defined via VSTS variables in start-job.sh
- Helix_JobToken: $(Helix_JobToken)
- Helix_WorkItemId: $(Helix_WorkItemId)
- MaxRetries: ${{ parameters.maxRetries }}
- RetryDelay: ${{ parameters.retryDelay }}
- condition: and(always(), ne(variables['Agent.Os'], 'Windows_NT'))
-- powershell: |
- if (($env:Agent_JobStatus -eq 'Succeeded') -or ($env:Agent_JobStatus -eq 'PartiallySucceeded')) {
- $ErrorCount = 0
- } else {
- $ErrorCount = 1
- }
- $WarningCount = 0
-
- # Basic retry loop to harden against server flakiness
- $retryCount = 0
- while ($retryCount -lt $env:MaxRetries) {
- try {
- Invoke-RestMethod -Uri "https://helix.dot.net/api/2018-03-14/telemetry/job/build/$env:Helix_WorkItemId/finish?errorCount=$ErrorCount&warningCount=$WarningCount" -Method Post -ContentType "application/json" -Body "" `
- -Headers @{ 'X-Helix-Job-Token'=$env:Helix_JobToken }
- break
- }
- catch {
- $statusCode = $_.Exception.Response.StatusCode.value__
- if ($statusCode -ge 400 -and $statusCode -le 499) {
- Write-Host "##vso[task.logissue]error Failed to send telemetry to Helix (status code $statusCode); not retrying (4xx client error)"
- Write-Host "##vso[task.logissue]error ", $_.Exception.GetType().FullName, $_.Exception.Message
- exit 1
- }
- Write-Host "Failed to send telemetry to Helix (status code $statusCode); waiting $env:RetryDelay seconds before retrying..."
- $retryCount++
- sleep $env:RetryDelay
- continue
- }
- }
-
- if ($retryCount -ge $env:MaxRetries) {
- Write-Host "##vso[task.logissue]error Failed to send telemetry to Helix after $retryCount retries."
- exit 1
- }
- displayName: Send Windows Build End Telemetry
- env:
- # defined via VSTS variables in start-job.ps1
- Helix_JobToken: $(Helix_JobToken)
- Helix_WorkItemId: $(Helix_WorkItemId)
- MaxRetries: ${{ parameters.maxRetries }}
- RetryDelay: ${{ parameters.retryDelay }}
- condition: and(always(),eq(variables['Agent.Os'], 'Windows_NT'))
diff --git a/eng/common/templates/steps/telemetry-start.yml b/eng/common/templates/steps/telemetry-start.yml
deleted file mode 100644
index 32c01ef0b553b4..00000000000000
--- a/eng/common/templates/steps/telemetry-start.yml
+++ /dev/null
@@ -1,241 +0,0 @@
-parameters:
- helixSource: 'undefined_defaulted_in_telemetry.yml'
- helixType: 'undefined_defaulted_in_telemetry.yml'
- buildConfig: ''
- runAsPublic: false
- maxRetries: 5
- retryDelay: 10 # in seconds
-
-steps:
-- ${{ if and(eq(parameters.runAsPublic, 'false'), not(eq(variables['System.TeamProject'], 'public'))) }}:
- - task: AzureKeyVault@1
- inputs:
- azureSubscription: 'HelixProd_KeyVault'
- KeyVaultName: HelixProdKV
- SecretsFilter: 'HelixApiAccessToken'
- condition: always()
-- bash: |
- # create a temporary file
- jobInfo=`mktemp`
-
- # write job info content to temporary file
- cat > $jobInfo <:-fsanitize-address-use-after-return=never>)
- add_compile_options($<$:-fsanitize-address-use-after-return=never>)
+ add_compile_options($<$:-fsanitize-address-use-after-return=never>)
+ add_compile_options($<$:-fsanitize-address-use-after-return=never>)
endif()
endif()
@@ -300,7 +301,13 @@ elseif(CLR_CMAKE_HOST_SUNOS)
add_definitions(-D__EXTENSIONS__ -D_XPG4_2 -D_POSIX_PTHREAD_SEMANTICS)
elseif(CLR_CMAKE_HOST_OSX AND NOT CLR_CMAKE_HOST_MACCATALYST AND NOT CLR_CMAKE_HOST_IOS AND NOT CLR_CMAKE_HOST_TVOS)
add_definitions(-D_XOPEN_SOURCE)
- add_linker_flag("-Wl,-bind_at_load")
+
+ # the new linker in Xcode 15 (ld_new/ld_prime) deprecated the -bind_at_load flag for macOS which causes a warning
+ # that fails the build since we build with -Werror. Only pass the flag if we need it, i.e. older linkers.
+ check_linker_flag(C "-Wl,-bind_at_load,-fatal_warnings" LINKER_SUPPORTS_BIND_AT_LOAD_FLAG)
+ if(LINKER_SUPPORTS_BIND_AT_LOAD_FLAG)
+ add_linker_flag("-Wl,-bind_at_load")
+ endif()
elseif(CLR_CMAKE_HOST_HAIKU)
add_compile_options($<$:-Wa,--noexecstack>)
add_linker_flag("-Wl,--no-undefined")
@@ -662,11 +669,11 @@ if (CLR_CMAKE_HOST_UNIX)
set(DISABLE_OVERRIDING_MIN_VERSION_ERROR -Wno-overriding-t-option)
add_link_options(-Wno-overriding-t-option)
if(CLR_CMAKE_HOST_ARCH_ARM64)
- set(MACOS_VERSION_MIN_FLAGS "-target arm64-apple-ios14.2-macabi")
- add_link_options(-target arm64-apple-ios14.2-macabi)
+ set(MACOS_VERSION_MIN_FLAGS "-target arm64-apple-ios15.0-macabi")
+ add_link_options(-target arm64-apple-ios15.0-macabi)
elseif(CLR_CMAKE_HOST_ARCH_AMD64)
- set(MACOS_VERSION_MIN_FLAGS "-target x86_64-apple-ios13.5-macabi")
- add_link_options(-target x86_64-apple-ios13.5-macabi)
+ set(MACOS_VERSION_MIN_FLAGS "-target x86_64-apple-ios15.0-macabi")
+ add_link_options(-target x86_64-apple-ios15.0-macabi)
else()
clr_unknown_arch()
endif()
@@ -679,11 +686,10 @@ if (CLR_CMAKE_HOST_UNIX)
set(CMAKE_OBJC_FLAGS "${CMAKE_OBJC_FLAGS} ${MACOS_VERSION_MIN_FLAGS} ${DISABLE_OVERRIDING_MIN_VERSION_ERROR}")
set(CMAKE_OBJCXX_FLAGS "${CMAKE_OBJCXX_FLAGS} ${MACOS_VERSION_MIN_FLAGS} ${DISABLE_OVERRIDING_MIN_VERSION_ERROR}")
elseif(CLR_CMAKE_HOST_OSX)
+ set(CMAKE_OSX_DEPLOYMENT_TARGET "12.0")
if(CLR_CMAKE_HOST_ARCH_ARM64)
- set(CMAKE_OSX_DEPLOYMENT_TARGET "11.0")
add_compile_options(-arch arm64)
elseif(CLR_CMAKE_HOST_ARCH_AMD64)
- set(CMAKE_OSX_DEPLOYMENT_TARGET "10.15")
add_compile_options(-arch x86_64)
else()
clr_unknown_arch()
diff --git a/eng/native/configureplatform.cmake b/eng/native/configureplatform.cmake
index c7c378ab0e41b3..20851f8617423d 100644
--- a/eng/native/configureplatform.cmake
+++ b/eng/native/configureplatform.cmake
@@ -429,7 +429,6 @@ endif(CLR_CMAKE_TARGET_OS STREQUAL haiku)
if(CLR_CMAKE_TARGET_OS STREQUAL emscripten)
set(CLR_CMAKE_TARGET_UNIX 1)
- set(CLR_CMAKE_TARGET_LINUX 1)
set(CLR_CMAKE_TARGET_BROWSER 1)
endif(CLR_CMAKE_TARGET_OS STREQUAL emscripten)
diff --git a/eng/native/gen-buildsys.cmd b/eng/native/gen-buildsys.cmd
index f67f17b8d47942..79db6bffae062d 100644
--- a/eng/native/gen-buildsys.cmd
+++ b/eng/native/gen-buildsys.cmd
@@ -62,7 +62,7 @@ if /i "%__Arch%" == "wasm" (
if /i "%__Os%" == "wasi" (
if "%WASI_SDK_PATH%" == "" (
if not exist "%__repoRoot%\src\mono\wasi\wasi-sdk" (
- echo Error: Should set WASI_SDK_PATH environment variable pointing to emsdk root.
+ echo Error: Should set WASI_SDK_PATH environment variable pointing to WASI SDK root.
exit /B 1
)
diff --git a/eng/pipelines/common/evaluate-default-paths.yml b/eng/pipelines/common/evaluate-default-paths.yml
index d954a1ddacbb57..975c18eb69d464 100644
--- a/eng/pipelines/common/evaluate-default-paths.yml
+++ b/eng/pipelines/common/evaluate-default-paths.yml
@@ -164,6 +164,10 @@ jobs:
- src/tools/illink/*
- global.json
+ - subset: tools_cdacreader
+ include:
+ - src/native/managed/cdacreader/*
+
- subset: installer
include:
exclude:
diff --git a/eng/pipelines/common/global-build-job.yml b/eng/pipelines/common/global-build-job.yml
index 1812fc318bb2ee..8273417557052b 100644
--- a/eng/pipelines/common/global-build-job.yml
+++ b/eng/pipelines/common/global-build-job.yml
@@ -34,6 +34,7 @@ parameters:
extraVariablesTemplates: []
preBuildSteps: []
templatePath: 'templates'
+ templateContext: ''
jobs:
- template: /eng/common/${{ parameters.templatePath }}/job/job.yml
@@ -51,6 +52,9 @@ jobs:
enablePublishTestResults: ${{ parameters.enablePublishTestResults }}
testResultsFormat: ${{ parameters.testResultsFormat }}
+ ${{ if ne(parameters.templateContext, '') }}:
+ templateContext: ${{ parameters.templateContext }}
+
artifacts:
publish:
logs:
diff --git a/eng/pipelines/common/platform-matrix.yml b/eng/pipelines/common/platform-matrix.yml
index cb3c8b2d0a65e2..2dc00a29da5d35 100644
--- a/eng/pipelines/common/platform-matrix.yml
+++ b/eng/pipelines/common/platform-matrix.yml
@@ -294,6 +294,25 @@ jobs:
helixQueueGroup: ${{ parameters.helixQueueGroup }}
${{ insert }}: ${{ parameters.jobParameters }}
+- ${{ if containsValue(parameters.platforms, 'linux_musl_x64_dev_innerloop') }}:
+ - template: xplat-setup.yml
+ parameters:
+ jobTemplate: ${{ parameters.jobTemplate }}
+ helixQueuesTemplate: ${{ parameters.helixQueuesTemplate }}
+ variables: ${{ parameters.variables }}
+ osGroup: linux
+ osSubgroup: _musl
+ archType: x64
+ targetRid: linux-musl-x64
+ platform: linux_musl_x64
+ shouldContinueOnError: ${{ parameters.shouldContinueOnError }}
+ container: linux_musl_x64_dev_innerloop
+ jobParameters:
+ runtimeFlavor: ${{ parameters.runtimeFlavor }}
+ buildConfig: ${{ parameters.buildConfig }}
+ helixQueueGroup: ${{ parameters.helixQueueGroup }}
+ ${{ insert }}: ${{ parameters.jobParameters }}
+
# GCC Linux x64 Build
- ${{ if containsValue(parameters.platforms, 'gcc_linux_x64') }}:
diff --git a/eng/pipelines/common/templates/pipeline-with-resources.yml b/eng/pipelines/common/templates/pipeline-with-resources.yml
index b9db26f6cb151c..62f8434f8335c0 100644
--- a/eng/pipelines/common/templates/pipeline-with-resources.yml
+++ b/eng/pipelines/common/templates/pipeline-with-resources.yml
@@ -17,7 +17,7 @@ extends:
containers:
linux_arm:
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-cross-arm-net8.0
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-cross-arm-net9.0
env:
ROOTFS_DIR: /crossrootfs/arm
@@ -33,17 +33,17 @@ extends:
ROOTFS_DIR: /crossrootfs/arm64
linux_musl_x64:
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-cross-amd64-alpine-net8.0
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-cross-amd64-alpine-net9.0
env:
ROOTFS_DIR: /crossrootfs/x64
linux_musl_arm:
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-cross-arm-alpine-net8.0
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-cross-arm-alpine-net9.0
env:
ROOTFS_DIR: /crossrootfs/arm
linux_musl_arm64:
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-cross-arm64-alpine-net8.0
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-cross-arm64-alpine-net9.0
env:
ROOTFS_DIR: /crossrootfs/arm64
@@ -56,18 +56,21 @@ extends:
image: mcr.microsoft.com/dotnet-buildtools/prereqs:cbl-mariner-2.0-android-docker
linux_x64:
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-cross-amd64-net8.0
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-cross-amd64-net9.0
env:
ROOTFS_DIR: /crossrootfs/x64
linux_x86:
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-cross-x86-net8.0
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-cross-x86-net9.0
env:
ROOTFS_DIR: /crossrootfs/x86
linux_x64_dev_innerloop:
image: mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-22.04
+ linux_musl_x64_dev_innerloop:
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:alpine-3.19-WithNode
+
# We use a CentOS Stream 8 image here to test building from source on CentOS Stream 8.
SourceBuild_centos_x64:
image: mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8
@@ -77,12 +80,12 @@ extends:
image: mcr.microsoft.com/dotnet-buildtools/prereqs:almalinux-8-source-build
linux_s390x:
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-18.04-cross-s390x
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:cbl-mariner-2.0-cross-s390x
env:
ROOTFS_DIR: /crossrootfs/s390x
linux_ppc64le:
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-18.04-cross-ppc64le
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:cbl-mariner-2.0-cross-ppc64le
env:
ROOTFS_DIR: /crossrootfs/ppc64le
@@ -121,4 +124,4 @@ extends:
image: mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-22.04-debpkg
rpmpkg:
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:cbl-mariner-2.0-fpm
\ No newline at end of file
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:cbl-mariner-2.0-fpm
diff --git a/eng/pipelines/common/templates/publish-pipeline-artifacts.yml b/eng/pipelines/common/templates/publish-pipeline-artifacts.yml
deleted file mode 100644
index 81f292ec5528ce..00000000000000
--- a/eng/pipelines/common/templates/publish-pipeline-artifacts.yml
+++ /dev/null
@@ -1,17 +0,0 @@
-parameters:
-- name: displayName
- type: string
-- name: inputs
- type: object
-- name: isOfficialBuild
- type: boolean
-
-steps:
- - ${{ if parameters.isOfficialBuild }}:
- - task: 1ES.PublishPipelineArtifact@1
- displayName: ${{ parameters.displayName }}
- inputs: ${{ parameters.inputs }}
- - ${{ else }}:
- - task: PublishPipelineArtifact@1
- displayName: ${{ parameters.displayName }}
- inputs: ${{ parameters.inputs }}
\ No newline at end of file
diff --git a/eng/pipelines/common/templates/runtimes/run-test-job.yml b/eng/pipelines/common/templates/runtimes/run-test-job.yml
index d6404617a3e1ad..0899b32fe524d9 100644
--- a/eng/pipelines/common/templates/runtimes/run-test-job.yml
+++ b/eng/pipelines/common/templates/runtimes/run-test-job.yml
@@ -537,6 +537,7 @@ jobs:
- jitosr_stress
- jitpartialcompilation_pgo
- jitoptrepeat
+ - jitrpolayout
${{ else }}:
scenarios:
- jitosr_stress
@@ -549,6 +550,7 @@ jobs:
- jitphysicalpromotion_full
- jitrlcse
- jitoptrepeat
+ - jitrpolayout
${{ if in(parameters.testGroup, 'jit-cfg') }}:
scenarios:
- jitcfg
diff --git a/eng/pipelines/common/templates/wasm-library-aot-tests.yml b/eng/pipelines/common/templates/wasm-library-aot-tests.yml
index 43b90a370450d1..2336f98401515a 100644
--- a/eng/pipelines/common/templates/wasm-library-aot-tests.yml
+++ b/eng/pipelines/common/templates/wasm-library-aot-tests.yml
@@ -1,7 +1,7 @@
parameters:
alwaysRun: false
extraBuildArgs: ''
- extraHelixArgs: ''
+ extraHelixArguments: ''
isExtraPlatformsBuild: false
isWasmOnlyBuild: false
buildAOTOnHelix: true
@@ -26,7 +26,7 @@ jobs:
isExtraPlatformsBuild: ${{ parameters.isExtraPlatformsBuild }}
isWasmOnlyBuild: ${{ parameters.isWasmOnlyBuild }}
extraBuildArgs: /p:EnableAggressiveTrimming=true /p:BuildAOTTestsOnHelix=${{ parameters.buildAOTOnHelix }} /p:RunAOTCompilation=${{ parameters.runAOT }} ${{ parameters.extraBuildArgs }}
- extraHelixArgs: /p:NeedsToBuildWasmAppsOnHelix=true ${{ parameters.extraHelixArgs }}
+ extraHelixArguments: /p:NeedsToBuildWasmAppsOnHelix=true ${{ parameters.extraHelixArguments }}
alwaysRun: ${{ parameters.alwaysRun }}
shouldRunSmokeOnly: ${{ parameters.shouldRunSmokeOnly }}
shouldContinueOnError: ${{ parameters.shouldContinueOnError }}
diff --git a/eng/pipelines/common/templates/wasm-library-tests.yml b/eng/pipelines/common/templates/wasm-library-tests.yml
index 4a1a5a79a30bfc..f015563fef00e3 100644
--- a/eng/pipelines/common/templates/wasm-library-tests.yml
+++ b/eng/pipelines/common/templates/wasm-library-tests.yml
@@ -1,7 +1,7 @@
parameters:
alwaysRun: false
extraBuildArgs: ''
- extraHelixArgs: ''
+ extraHelixArguments: ''
isExtraPlatformsBuild: false
isWasmOnlyBuild: false
nameSuffix: ''
@@ -97,5 +97,5 @@ jobs:
parameters:
creator: dotnet-bot
testRunNamePrefixSuffix: Mono_$(_BuildConfig)
- extraHelixArguments: /p:BrowserHost=$(_hostedOs) $(_wasmRunSmokeTestsOnlyArg) ${{ parameters.extraHelixArgs }}
+ extraHelixArguments: /p:BrowserHost=$(_hostedOs) $(_wasmRunSmokeTestsOnlyArg) ${{ parameters.extraHelixArguments }}
scenarios: ${{ parameters.scenarios }}
diff --git a/eng/pipelines/common/xplat-setup.yml b/eng/pipelines/common/xplat-setup.yml
index 743f6a42531bcc..f50a2db9e81ec5 100644
--- a/eng/pipelines/common/xplat-setup.yml
+++ b/eng/pipelines/common/xplat-setup.yml
@@ -188,7 +188,6 @@ jobs:
name: $(DncEngPublicBuildPool)
demands: ImageOverride -equals windows.vs2022.amd64.open
-
${{ if eq(parameters.helixQueuesTemplate, '') }}:
# macOS hosted pool machines are slower so we need to give a greater timeout than the 60 mins default.
${{ if and(eq(parameters.jobParameters.timeoutInMinutes, ''), in(parameters.osGroup, 'osx', 'maccatalyst', 'ios', 'tvos')) }}:
diff --git a/eng/pipelines/coreclr/nativeaot-post-build-steps.yml b/eng/pipelines/coreclr/nativeaot-post-build-steps.yml
index 94761028f48ab0..bc29a657c456ce 100644
--- a/eng/pipelines/coreclr/nativeaot-post-build-steps.yml
+++ b/eng/pipelines/coreclr/nativeaot-post-build-steps.yml
@@ -21,11 +21,3 @@ steps:
nativeAotTest: true
helixQueues: ${{ parameters.helixQueues }}
liveLibrariesBuildConfig: ${{ parameters.liveLibrariesBuildConfig }}
-
- # Can't run arm/arm64 tests on x64 build machines
- - ${{ if and(ne(parameters.archType, 'arm'), ne(parameters.archType, 'arm64')) }}:
-
- # Publishing tooling doesn't support different configs between runtime and libs, so only run tests in Release config
- - ${{ if eq(parameters.buildConfig, 'release') }}:
- - script: $(Build.SourcesDirectory)$(dir)build$(scriptExt) -ci -arch ${{ parameters.archType }} $(_osParameter) -s libs.tests -c $(_BuildConfig) $(crossArg) $(_nativeSanitizersArg) /p:TestAssemblies=false /p:RunNativeAotTestApps=true $(_officialBuildParameter) /bl:$(Build.SourcesDirectory)/artifacts/log/$(buildConfigUpper)/NativeAotTests.binlog ${{ parameters.extraTestArgs }}
- displayName: Run NativeAot Library Tests
diff --git a/eng/pipelines/coreclr/perf-non-wasm-jobs.yml b/eng/pipelines/coreclr/perf-non-wasm-jobs.yml
index c48103af929c81..574728cfed7eae 100644
--- a/eng/pipelines/coreclr/perf-non-wasm-jobs.yml
+++ b/eng/pipelines/coreclr/perf-non-wasm-jobs.yml
@@ -306,7 +306,7 @@ jobs:
runJobTemplate: /eng/pipelines/coreclr/templates/run-performance-job.yml
logicalmachine: 'perfviper'
- # run coreclr perfowl microbenchmarks perf gdv3 jobs
+ # run coreclr perfowl microbenchmarks perf rlcse jobs
- template: /eng/pipelines/common/platform-matrix.yml
parameters:
jobTemplate: /eng/pipelines/coreclr/templates/perf-job.yml
@@ -322,9 +322,9 @@ jobs:
runKind: micro
runJobTemplate: /eng/pipelines/coreclr/templates/run-performance-job.yml
logicalmachine: 'perfowl'
- experimentName: 'gdv3'
+ experimentName: 'rlcse'
- # run coreclr perfowl microbenchmarks perf rlcse jobs
+ # run coreclr perfowl microbenchmarks perf jitoptrepeat jobs
- template: /eng/pipelines/common/platform-matrix.yml
parameters:
jobTemplate: /eng/pipelines/coreclr/templates/perf-job.yml
@@ -340,9 +340,9 @@ jobs:
runKind: micro
runJobTemplate: /eng/pipelines/coreclr/templates/run-performance-job.yml
logicalmachine: 'perfowl'
- experimentName: 'rlcse'
+ experimentName: 'jitoptrepeat'
- # run coreclr perfowl microbenchmarks perf jitoptrepeat jobs
+ # run coreclr perfowl microbenchmarks perf rpolayout jobs
- template: /eng/pipelines/common/platform-matrix.yml
parameters:
jobTemplate: /eng/pipelines/coreclr/templates/perf-job.yml
@@ -358,7 +358,7 @@ jobs:
runKind: micro
runJobTemplate: /eng/pipelines/coreclr/templates/run-performance-job.yml
logicalmachine: 'perfowl'
- experimentName: 'jitoptrepeat'
+ experimentName: 'rpolayout'
# run coreclr crossgen perf job
- template: /eng/pipelines/common/platform-matrix.yml
diff --git a/eng/pipelines/coreclr/runtime-nativeaot-outerloop.yml b/eng/pipelines/coreclr/runtime-nativeaot-outerloop.yml
index e8bfd86cd81dd2..494601a890df12 100644
--- a/eng/pipelines/coreclr/runtime-nativeaot-outerloop.yml
+++ b/eng/pipelines/coreclr/runtime-nativeaot-outerloop.yml
@@ -69,7 +69,7 @@ extends:
testGroup: innerloop
isSingleFile: true
nameSuffix: NativeAOT_Libs
- buildArgs: -s clr.aot+host.native+libs+libs.tests -c $(_BuildConfig) /p:TestNativeAot=true /p:ArchiveTests=true /p:IlcUseServerGc=false /p:RunAnalyzers=false
+ buildArgs: -s clr.aot+libs+libs.tests -c $(_BuildConfig) /p:TestNativeAot=true /p:ArchiveTests=true /p:IlcUseServerGc=false /p:RunAnalyzers=false
timeoutInMinutes: 300 # doesn't normally take this long, but I've seen Helix queues backed up for 160 minutes
includeAllPlatforms: true
# extra steps, run tests
@@ -95,7 +95,7 @@ extends:
testGroup: innerloop
isSingleFile: true
nameSuffix: NativeAOT_Checked_Libs
- buildArgs: -s clr.aot+host.native+libs+libs.tests -c $(_BuildConfig) -rc Checked /p:TestNativeAot=true /p:ArchiveTests=true /p:IlcUseServerGc=false /p:RunAnalyzers=false
+ buildArgs: -s clr.aot+libs+libs.tests -c $(_BuildConfig) -rc Checked /p:TestNativeAot=true /p:ArchiveTests=true /p:IlcUseServerGc=false /p:RunAnalyzers=false
timeoutInMinutes: 360
# extra steps, run tests
postBuildSteps:
@@ -120,7 +120,7 @@ extends:
testGroup: innerloop
isSingleFile: true
nameSuffix: NativeAOT_Checked_Libs_SizeOpt
- buildArgs: -s clr.aot+host.native+libs+libs.tests -c $(_BuildConfig) -rc Checked /p:TestNativeAot=true /p:ArchiveTests=true /p:OptimizationPreference=Size /p:IlcUseServerGc=false /p:RunAnalyzers=false
+ buildArgs: -s clr.aot+libs+libs.tests -c $(_BuildConfig) -rc Checked /p:TestNativeAot=true /p:ArchiveTests=true /p:OptimizationPreference=Size /p:IlcUseServerGc=false /p:RunAnalyzers=false
timeoutInMinutes: 240
# extra steps, run tests
postBuildSteps:
@@ -145,7 +145,7 @@ extends:
testGroup: innerloop
isSingleFile: true
nameSuffix: NativeAOT_Checked_Libs_SpeedOpt
- buildArgs: -s clr.aot+host.native+libs+libs.tests -c $(_BuildConfig) -rc Checked /p:TestNativeAot=true /p:ArchiveTests=true /p:OptimizationPreference=Speed /p:IlcUseServerGc=false /p:RunAnalyzers=false
+ buildArgs: -s clr.aot+libs+libs.tests -c $(_BuildConfig) -rc Checked /p:TestNativeAot=true /p:ArchiveTests=true /p:OptimizationPreference=Speed /p:IlcUseServerGc=false /p:RunAnalyzers=false
timeoutInMinutes: 240
# extra steps, run tests
postBuildSteps:
@@ -176,7 +176,7 @@ extends:
jobParameters:
timeoutInMinutes: 300 # doesn't normally take this long, but we have had Helix queues backed up for over an hour
nameSuffix: NativeAOT_Pri0
- buildArgs: -s clr.aot+host.native+libs -rc $(_BuildConfig) -lc Release -hc Release /p:RunAnalyzers=false
+ buildArgs: -s clr.aot+libs -rc $(_BuildConfig) -lc Release /p:RunAnalyzers=false
postBuildSteps:
- template: /eng/pipelines/coreclr/nativeaot-post-build-steps.yml
parameters:
diff --git a/eng/pipelines/extra-platforms/runtime-extra-platforms-wasm.yml b/eng/pipelines/extra-platforms/runtime-extra-platforms-wasm.yml
index 2c947a048e0550..a5c6cb414227d8 100644
--- a/eng/pipelines/extra-platforms/runtime-extra-platforms-wasm.yml
+++ b/eng/pipelines/extra-platforms/runtime-extra-platforms-wasm.yml
@@ -197,8 +197,8 @@ jobs:
isWasmOnlyBuild: ${{ parameters.isWasmOnlyBuild }}
alwaysRun: true
scenarios:
- - WasmTestOnV8
- WasmTestOnChrome
+ - WasmTestOnFirefox
- WasmTestOnNodeJS
# Hybrid Globalization AOT tests
@@ -214,7 +214,6 @@ jobs:
isWasmOnlyBuild: ${{ parameters.isWasmOnlyBuild }}
alwaysRun: true
scenarios:
- - WasmTestOnV8
- WasmTestOnChrome
- WasmTestOnNodeJS
@@ -321,4 +320,4 @@ jobs:
isExtraPlatformsBuild: ${{ parameters.isExtraPlatformsBuild }}
isWasmOnlyBuild: ${{ parameters.isWasmOnlyBuild }}
scenarios:
- - WasmTestOnV8
+ - WasmTestOnWasmtime
diff --git a/eng/pipelines/global-build.yml b/eng/pipelines/global-build.yml
index 2e70a2448cd9d3..d7302fe253b6b9 100644
--- a/eng/pipelines/global-build.yml
+++ b/eng/pipelines/global-build.yml
@@ -1,6 +1,6 @@
-# The purpose of this pipeline is to exercise local developer workflow in the consolidated
-# runtime repo. In particular, it is supposed to run the root "build" script just like any
-# normal developer normally would and monitor regressions w.r.t. this fundamental scenario.
+# The purpose of this pipeline is to exercise various developer workflows in the repo.
+# Primarily, it is meant to cover local (non-cross) build scenarios and
+# source-build scenarios that commonly cause build breaks.
trigger: none
@@ -41,28 +41,6 @@ extends:
- stage: Build
jobs:
- #
- # Build with Release config and Debug runtimeConfiguration
- #
- - template: /eng/pipelines/common/platform-matrix.yml
- parameters:
- jobTemplate: /eng/pipelines/common/global-build-job.yml
- buildConfig: release
- platforms:
- - windows_x86
- - osx_x64
- - osx_arm64
- jobParameters:
- testGroup: innerloop
- nameSuffix: Runtime_Debug
- buildArgs: -c release -runtimeConfiguration debug
- timeoutInMinutes: 120
- condition:
- or(
- eq(stageDependencies.EvaluatePaths.evaluate_paths.outputs['SetPathVars_coreclr.containsChange'], true),
- eq(stageDependencies.EvaluatePaths.evaluate_paths.outputs['SetPathVars_non_mono_and_wasm.containsChange'], true),
- eq(variables['isRollingBuild'], true))
-
#
# Build with Release config and runtimeConfiguration with MSBuild generator
#
@@ -83,26 +61,6 @@ extends:
eq(stageDependencies.EvaluatePaths.evaluate_paths.outputs['SetPathVars_non_mono_and_wasm.containsChange'], true),
eq(variables['isRollingBuild'], true))
- #
- # Build with Debug config and Release runtimeConfiguration
- #
- - template: /eng/pipelines/common/platform-matrix.yml
- parameters:
- jobTemplate: /eng/pipelines/common/global-build-job.yml
- buildConfig: debug
- platforms:
- - linux_x64_dev_innerloop
- jobParameters:
- testGroup: innerloop
- nameSuffix: Runtime_Release
- buildArgs: -c debug -runtimeConfiguration release
- timeoutInMinutes: 120
- condition:
- or(
- eq(stageDependencies.EvaluatePaths.evaluate_paths.outputs['SetPathVars_coreclr.containsChange'], true),
- eq(stageDependencies.EvaluatePaths.evaluate_paths.outputs['SetPathVars_non_mono_and_wasm.containsChange'], true),
- eq(variables['isRollingBuild'], true))
-
#
# Build with RuntimeFlavor only. This exercise code paths where only RuntimeFlavor is
# specified. Catches cases where we depend on Configuration also being specified
@@ -124,38 +82,37 @@ extends:
eq(variables['isRollingBuild'], true))
#
- # Build Mono + Libraries. This exercises the code path where we build libraries without
- # first building CoreCLR
+ # Build Libraries AllConfigurations. This exercises the code path where we build libraries for all
+ # configurations on a non Windows operating system.
#
- template: /eng/pipelines/common/platform-matrix.yml
parameters:
jobTemplate: /eng/pipelines/common/global-build-job.yml
buildConfig: debug
platforms:
- - windows_x64
+ - linux_x64_dev_innerloop
jobParameters:
- testGroup: innerloop
- nameSuffix: Mono_Libraries
- buildArgs: -subset mono+libs /p:RuntimeFlavor=Mono
+ nameSuffix: Libraries_AllConfigurations
+ buildArgs: -subset libs -allconfigurations
timeoutInMinutes: 120
condition:
or(
- eq(stageDependencies.EvaluatePaths.evaluate_paths.outputs['SetPathVars_non_wasm.containsChange'], true),
+ eq(stageDependencies.EvaluatePaths.evaluate_paths.outputs['SetPathVars_non_mono_and_wasm.containsChange'], true),
eq(variables['isRollingBuild'], true))
#
- # Build Libraries AllConfigurations. This exercises the code path where we build libraries for all
- # configurations on a non Windows operating system.
+ # Build native assets on Alpine. This exercises more modern musl libc changes that have a tendency to break source-build.
+ # We don't add this as a source-build job as the repo source-build infrastructure isn't set up to run on alpine effectively.
#
- template: /eng/pipelines/common/platform-matrix.yml
parameters:
jobTemplate: /eng/pipelines/common/global-build-job.yml
- buildConfig: debug
+ buildConfig: release
platforms:
- - linux_x64_dev_innerloop
+ - linux_musl_x64_dev_innerloop
jobParameters:
- nameSuffix: Libraries_AllConfigurations
- buildArgs: -subset libs -allconfigurations
+ nameSuffix: Musl_Validation
+ buildArgs: -subset clr.native+libs.native+host.native -c $(_BuildConfig)
timeoutInMinutes: 120
condition:
or(
diff --git a/eng/pipelines/libraries/execute-trimming-tests-steps.yml b/eng/pipelines/libraries/execute-trimming-tests-steps.yml
index 567abab0bb984e..13778ee0ab63a5 100644
--- a/eng/pipelines/libraries/execute-trimming-tests-steps.yml
+++ b/eng/pipelines/libraries/execute-trimming-tests-steps.yml
@@ -1,8 +1,14 @@
parameters:
archType: ''
extraTestArgs: ''
+ runAotTests: true
steps:
# Execute tests
- script: $(Build.SourcesDirectory)$(dir)build$(scriptExt) -ci -arch ${{ parameters.archType }} $(_osParameter) -s libs.tests -c $(_BuildConfig) $(crossArg) /p:TestAssemblies=false /p:TestTrimming=true $(_officialBuildParameter) /bl:$(Build.SourcesDirectory)/artifacts/log/$(buildConfigUpper)/TrimmingTests.binlog ${{ parameters.extraTestArgs }}
displayName: Run Trimming Tests
+
+ # Execute AOT test app tests
+ - ${{ if eq(parameters.runAotTests, true) }}:
+ - script: $(Build.SourcesDirectory)$(dir)build$(scriptExt) -ci -arch ${{ parameters.archType }} $(_osParameter) -s libs.tests -c $(_BuildConfig) $(crossArg) /p:TestAssemblies=false /p:RunNativeAotTestApps=true $(_officialBuildParameter) /bl:$(Build.SourcesDirectory)/artifacts/log/$(buildConfigUpper)/NativeAotTestAppTests.binlog ${{ parameters.extraTestArgs }}
+ displayName: Run Native AOT Test App Tests
diff --git a/eng/pipelines/mono/templates/generate-offsets.yml b/eng/pipelines/mono/templates/generate-offsets.yml
deleted file mode 100644
index 8d8d781dd3262e..00000000000000
--- a/eng/pipelines/mono/templates/generate-offsets.yml
+++ /dev/null
@@ -1,88 +0,0 @@
-parameters:
- buildConfig: 'Debug'
- osGroup: ''
- osSubGroup: ''
- platform: ''
- container: ''
- timeoutInMinutes: ''
- variables: {}
- pool: ''
- condition: true
- isOfficialBuild: false
- templatePath: 'templates'
-
-### Product build
-jobs:
-- template: /eng/pipelines/common/templates/runtimes/xplat-job.yml
- parameters:
- templatePath: ${{ parameters.templatePath }}
- buildConfig: ${{ parameters.buildConfig }}
- osGroup: ${{ parameters.osGroup }}
- osSubGroup: ${{ parameters.osSubGroup }}
- helixType: 'build/product/'
- enableMicrobuild: true
- pool: ${{ parameters.pool }}
- condition: ${{ parameters.condition }}
- dependOnEvaluatePaths: ${{ parameters.dependOnEvaluatePaths }}
- logsName: 'BuildLogs_Attempt$(System.JobAttempt)_Mono_Offsets_$(osGroup)$(osSubGroup)'
-
- # Compute job name from template parameters
- name: ${{ format('mono_{0}{1}_offsets', parameters.osGroup, parameters.osSubGroup) }}
- displayName: ${{ format('Mono {0}{1} AOT offsets', parameters.osGroup, parameters.osSubGroup) }}
-
- # Run all steps in the container.
- # Note that the containers are defined in platform-matrix.yml
- container: ${{ parameters.container }}
-
- timeoutInMinutes: ${{ parameters.timeoutInMinutes }}
-
- gatherAssetManifests: true
- variables:
- - name: osGroup
- value: ${{ parameters.osGroup }}
- - name: osSubGroup
- value: ${{ parameters.osSubGroup }}
- - name: officialBuildIdArg
- value: ''
- - ${{ if eq(parameters.isOfficialBuild, true) }}:
- - name: officialBuildIdArg
- value: '/p:OfficialBuildId=$(Build.BuildNumber)'
- - name: osOverride
- value: -os linux
- - name: archType
- value: x64
- - ${{ parameters.variables }}
-
- steps:
-
- # Install native dependencies
- # Linux builds use docker images with dependencies preinstalled,
- # and FreeBSD builds use a build agent with dependencies
- # preinstalled, so we only need this step for OSX and Windows.
- - ${{ if in(parameters.osGroup, 'osx', 'maccatalyst', 'ios', 'iossimulator', 'tvos', 'tvossimulator') }}:
- - script: $(Build.SourcesDirectory)/eng/install-native-dependencies.sh $(osGroup)
- displayName: Install native dependencies
-
- # Build
- - ${{ if ne(parameters.osGroup, 'windows') }}:
- - script: ./build$(scriptExt) -subset mono.aotcross -c $(buildConfig) -arch $(archType) $(osOverride) -ci $(officialBuildIdArg) /p:MonoGenerateOffsetsOSGroups=$(osGroup)
- displayName: Generate AOT offsets
- - ${{ if eq(parameters.osGroup, 'windows') }}:
- - script: build$(scriptExt) -subset mono.aotcross -c $(buildConfig) -arch $(archType) $(osOverride) -ci $(officialBuildIdArg) /p:MonoGenerateOffsetsOSGroups=$(osGroup)
- displayName: Generate AOT offsets
-
- # Upload offset files
- - task: CopyFiles@2
- displayName: Collect offset files
- inputs:
- sourceFolder: '$(Build.SourcesDirectory)/artifacts/obj/mono/'
- contents: '**/offsets-*.h'
- targetFolder: '$(Build.SourcesDirectory)/artifacts/obj/mono/offsetfiles/'
-
- - template: /eng/pipelines/common/templates/publish-pipeline-artifacts.yml
- parameters:
- displayName: Upload offset files
- isOfficialBuild: ${{ parameters.isOfficialBuild }}
- inputs:
- targetPath: '$(Build.SourcesDirectory)/artifacts/obj/mono/offsetfiles'
- artifactName: 'Mono_Offsets_$(osGroup)$(osSubGroup)'
diff --git a/eng/pipelines/mono/templates/workloads-build.yml b/eng/pipelines/mono/templates/workloads-build.yml
deleted file mode 100644
index 89404db6b29201..00000000000000
--- a/eng/pipelines/mono/templates/workloads-build.yml
+++ /dev/null
@@ -1,117 +0,0 @@
-parameters:
- archType: ''
- buildConfig: ''
- container: ''
- dependsOn: []
- isOfficialBuild: false
- osGroup: ''
- osSubgroup: ''
- platform: ''
- pool: ''
- runtimeVariant: ''
- testGroup: ''
- timeoutInMinutes: ''
- templatePath: 'templates'
- variables: {}
-
-jobs:
-- template: /eng/pipelines/common/templates/runtimes/xplat-job.yml
- parameters:
- templatePath: ${{ parameters.templatePath }}
- archType: ${{ parameters.archType }}
- buildConfig: ${{ parameters.buildConfig }}
- container: ${{ parameters.container }}
- condition: and(succeeded(), ${{ parameters.isOfficialBuild }})
- helixType: 'build/product/'
- osGroup: ${{ parameters.osGroup }}
- osSubgroup: ${{ parameters.osSubgroup }}
- pool: ${{ parameters.pool }}
- runtimeVariant: ${{ parameters.runtimeVariant }}
- timeoutInMinutes: ${{ parameters.timeoutInMinutes }}
- logsName: WorkloadLogs_Attempt$(System.JobAttempt)
-
- dependsOn: ${{ parameters.dependsOn }}
-
- name: workloadsbuild
- displayName: Build Workloads
-
- variables:
- - name: officialBuildIdArg
- value: ''
- - ${{ if eq(parameters.isOfficialBuild, true) }}:
- - name: officialBuildIdArg
- value: '/p:OfficialBuildId=$(Build.BuildNumber)'
- - name: SignType
- value: $[ coalesce(variables.OfficialSignType, 'real') ]
- - name: workloadPackagesPath
- value: $(Build.SourcesDirectory)/artifacts/workloadPackages
- - name: workloadArtifactsPath
- value: $(Build.SourcesDirectory)/artifacts/workloads
- - ${{ parameters.variables }}
-
- steps:
- - task: DownloadPipelineArtifact@2
- inputs:
- artifact: 'IntermediateArtifacts'
- path: $(workloadPackagesPath)
- patterns: |
- IntermediateArtifacts/MonoRuntimePacks/Shipping/Microsoft.NETCore.App.Runtime.AOT.win-x64.Cross.android-*.nupkg
- IntermediateArtifacts/MonoRuntimePacks/Shipping/Microsoft.NETCore.App.Runtime.AOT.win-arm64.Cross.android-*.nupkg
- IntermediateArtifacts/MonoRuntimePacks/Shipping/Microsoft.NETCore.App.Runtime.AOT.win-x64.Cross.browser-wasm*.nupkg
- IntermediateArtifacts/MonoRuntimePacks/Shipping/Microsoft.NETCore.App.Runtime.AOT.win-arm64.Cross.browser-wasm*.nupkg
- IntermediateArtifacts/MonoRuntimePacks/Shipping/Microsoft.NETCore.App.Runtime.AOT.win-x64.Cross.wasi-wasm*.nupkg
- IntermediateArtifacts/MonoRuntimePacks/Shipping/Microsoft.NETCore.App.Runtime.AOT.win-arm64.Cross.wasi-wasm*.nupkg
- IntermediateArtifacts/MonoRuntimePacks/Shipping/Microsoft.NETCore.App.Runtime.Mono.android-*.nupkg
- IntermediateArtifacts/MonoRuntimePacks/Shipping/Microsoft.NETCore.App.Runtime.Mono.browser-wasm*.nupkg
- IntermediateArtifacts/MonoRuntimePacks/Shipping/Microsoft.NETCore.App.Runtime.Mono.multithread.browser-wasm*.nupkg
- IntermediateArtifacts/MonoRuntimePacks/Shipping/Microsoft.NETCore.App.Runtime.Mono.ios-*.nupkg
- IntermediateArtifacts/MonoRuntimePacks/Shipping/Microsoft.NETCore.App.Runtime.Mono.iossimulator-*.nupkg
- IntermediateArtifacts/MonoRuntimePacks/Shipping/Microsoft.NETCore.App.Runtime.Mono.maccatalyst-*.nupkg
- IntermediateArtifacts/MonoRuntimePacks/Shipping/Microsoft.NETCore.App.Runtime.Mono.multithread.browser-wasm*.nupkg
- IntermediateArtifacts/MonoRuntimePacks/Shipping/Microsoft.NETCore.App.Runtime.Mono.tvos-*.nupkg
- IntermediateArtifacts/MonoRuntimePacks/Shipping/Microsoft.NETCore.App.Runtime.Mono.tvossimulator-*.nupkg
- IntermediateArtifacts/MonoRuntimePacks/Shipping/Microsoft.NETCore.App.Runtime.Mono.wasi-wasm*.nupkg
- IntermediateArtifacts/MonoRuntimePacks/Shipping/Microsoft.NET.Workload.Mono.ToolChain.Current.Manifest*.nupkg
- IntermediateArtifacts/MonoRuntimePacks/Shipping/Microsoft.NET.Workload.Mono.ToolChain.net6.Manifest*.nupkg
- IntermediateArtifacts/MonoRuntimePacks/Shipping/Microsoft.NET.Workload.Mono.ToolChain.net7.Manifest*.nupkg
- IntermediateArtifacts/MonoRuntimePacks/Shipping/Microsoft.NET.Workload.Mono.ToolChain.net8.Manifest*.nupkg
- IntermediateArtifacts/MonoRuntimePacks/Shipping/Microsoft.NET.Runtime.MonoTargets.Sdk*.nupkg
- IntermediateArtifacts/MonoRuntimePacks/Shipping/Microsoft.NET.Runtime.MonoAOTCompiler.Task*.nupkg
- IntermediateArtifacts/MonoRuntimePacks/Shipping/Microsoft.NET.Runtime.WebAssembly.Sdk*.nupkg
- IntermediateArtifacts/MonoRuntimePacks/Shipping/Microsoft.NET.Runtime.WebAssembly.Wasi*.nupkg
- IntermediateArtifacts/MonoRuntimePacks/Shipping/Microsoft.NET.Runtime.WebAssembly.Templates*.nupkg
- IntermediateArtifacts/windows_arm64/Shipping/Microsoft.NETCore.App.Runtime.win-arm64*.nupkg
- IntermediateArtifacts/windows_x64/Shipping/Microsoft.NETCore.App.Runtime.win-x64*.nupkg
- IntermediateArtifacts/windows_x86/Shipping/Microsoft.NETCore.App.Runtime.win-x86*.nupkg
- IntermediateArtifacts/MonoRuntimePacks/Shipping/Microsoft.NET.Sdk.WebAssembly.Pack*.nupkg
-
- - task: CopyFiles@2
- displayName: Flatten packages
- inputs:
- sourceFolder: $(workloadPackagesPath)
- contents: '*/Shipping/*.nupkg'
- cleanTargetFolder: false
- targetFolder: $(workloadPackagesPath)
- flattenFolders: true
-
- - script: $(Build.SourcesDirectory)$(dir)build$(scriptExt) -subset mono.workloads -arch $(archType) -c $(buildConfig) $(officialBuildIdArg) -ci
- displayName: Build workload artifacts
-
- # Upload packages wrapping msis
- - template: /eng/pipelines/common/upload-intermediate-artifacts-step.yml
- parameters:
- name: workloads
-
- # Delete wixpdb files before they are uploaded to artifacts
- - task: DeleteFiles@1
- displayName: Delete wixpdb's
- inputs:
- SourceFolder: $(workloadArtifactsPath)
- Contents: '*.wixpdb'
-
- # Upload artifacts to be used for generating VS components
- - template: /eng/pipelines/common/upload-intermediate-artifacts-step.yml
- parameters:
- name: workloads-vs
- publishPackagesCondition: false
- publishVSSetupCondition: true
diff --git a/eng/pipelines/official/jobs/prepare-signed-artifacts.yml b/eng/pipelines/official/jobs/prepare-signed-artifacts.yml
index 24fd2df48d74be..eb25d311890a98 100644
--- a/eng/pipelines/official/jobs/prepare-signed-artifacts.yml
+++ b/eng/pipelines/official/jobs/prepare-signed-artifacts.yml
@@ -1,76 +1,65 @@
parameters:
- dependsOn: []
PublishRidAgnosticPackagesFromPlatform: ''
isOfficialBuild: false
logArtifactName: 'Logs-PrepareSignedArtifacts_Attempt$(System.JobAttempt)'
jobs:
-- job: PrepareSignedArtifacts
- displayName: Prepare Signed Artifacts
- dependsOn: ${{ parameters.dependsOn }}
- pool:
- name: $(DncEngInternalBuildPool)
- demands: ImageOverride -equals 1es-windows-2022
- # Double the default timeout.
- timeoutInMinutes: 240
- workspace:
- clean: all
+- template: /eng/common/templates-official/job/job.yml
+ parameters:
+ name: 'PrepareSignedArtifacts'
+ displayName: 'Prepare Signed Artifacts'
- variables:
- - name: SignType
- value: $[ coalesce(variables.OfficialSignType, 'real') ]
+ pool:
+ name: $(DncEngInternalBuildPool)
+ demands: ImageOverride -equals 1es-windows-2022
- templateContext:
- outputs:
- - output: pipelineArtifact
- displayName: 'Publish BuildLogs'
- condition: succeededOrFailed()
- targetPath: '$(Build.StagingDirectory)\BuildLogs'
- artifactName: ${{ parameters.logArtifactName }}
+ # Double the default timeout.
+ timeoutInMinutes: 240
- steps:
- - checkout: self
- clean: true
- fetchDepth: 20
+ workspace:
+ clean: all
- - ${{ if eq(parameters.isOfficialBuild, true) }}:
- - task: NuGetAuthenticate@1
+ enableMicrobuild: true
- - task: MicroBuildSigningPlugin@2
- displayName: Install MicroBuild plugin for Signing
- inputs:
- signType: $(SignType)
- zipSources: false
- feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json
- continueOnError: false
- condition: and(succeeded(),
- in(variables['SignType'], 'real', 'test'))
-
- - task: DownloadBuildArtifacts@0
- displayName: Download IntermediateArtifacts
- inputs:
- artifactName: IntermediateArtifacts
- downloadPath: $(Build.SourcesDirectory)\artifacts\PackageDownload
- checkDownloadedFiles: true
-
- - script: >-
- build.cmd -ci
- -subset publish
- -configuration Release
- /p:PublishRidAgnosticPackagesFromPlatform=${{ parameters.PublishRidAgnosticPackagesFromPlatform }}
- /p:OfficialBuildId=$(Build.BuildNumber)
- /p:SignType=$(SignType)
- /p:DotNetSignType=$(SignType)
- /bl:$(Build.SourcesDirectory)\prepare-artifacts.binlog
- displayName: Prepare artifacts and upload to build
-
- - task: CopyFiles@2
- displayName: Copy Files to $(Build.StagingDirectory)\BuildLogs
- inputs:
- SourceFolder: '$(Build.SourcesDirectory)'
- Contents: |
- **/*.log
- **/*.binlog
- TargetFolder: '$(Build.StagingDirectory)\BuildLogs'
- continueOnError: true
- condition: succeededOrFailed()
\ No newline at end of file
+ variables:
+ - name: '_SignType'
+ value: $[ coalesce(variables.OfficialSignType, 'real') ]
+
+ templateContext:
+ inputs:
+ - input: checkout
+ repository: self
+ clean: true
+ fetchDepth: 20
+ - input: pipelineArtifact
+ artifactName: IntermediateArtifacts
+ targetPath: $(Build.SourcesDirectory)\artifacts\PackageDownload\IntermediateArtifacts
+ outputs:
+ - output: pipelineArtifact
+ displayName: 'Publish BuildLogs'
+ condition: succeededOrFailed()
+ targetPath: '$(Build.StagingDirectory)\BuildLogs'
+ artifactName: ${{ parameters.logArtifactName }}
+
+ steps:
+ - script: >-
+ build.cmd -ci
+ -subset publish
+ -configuration Release
+ /p:PublishRidAgnosticPackagesFromPlatform=${{ parameters.PublishRidAgnosticPackagesFromPlatform }}
+ /p:OfficialBuildId=$(Build.BuildNumber)
+ /p:SignType=$(_SignType)
+ /p:DotNetSignType=$(_SignType)
+ /bl:$(Build.SourcesDirectory)\prepare-artifacts.binlog
+ displayName: Prepare artifacts and upload to build
+
+ - task: CopyFiles@2
+ displayName: Copy Files to $(Build.StagingDirectory)\BuildLogs
+ inputs:
+ SourceFolder: '$(Build.SourcesDirectory)'
+ Contents: |
+ **/*.log
+ **/*.binlog
+ TargetFolder: '$(Build.StagingDirectory)\BuildLogs'
+ continueOnError: true
+ condition: succeededOrFailed()
\ No newline at end of file
diff --git a/eng/pipelines/runtime-linker-tests.yml b/eng/pipelines/runtime-linker-tests.yml
index aec5e1057ac538..2b001b769f50f9 100644
--- a/eng/pipelines/runtime-linker-tests.yml
+++ b/eng/pipelines/runtime-linker-tests.yml
@@ -135,3 +135,4 @@ extends:
- template: /eng/pipelines/libraries/execute-trimming-tests-steps.yml
parameters:
extraTestArgs: '/p:WasmBuildNative=false'
+ runAotTests: false
diff --git a/eng/pipelines/runtime-official.yml b/eng/pipelines/runtime-official.yml
index 23f19405abea7c..cf856d94df6164 100644
--- a/eng/pipelines/runtime-official.yml
+++ b/eng/pipelines/runtime-official.yml
@@ -381,11 +381,12 @@ extends:
parameters:
name: MonoRuntimePacks
+
# Build Mono AOT offset headers once, for consumption elsewhere
#
- template: /eng/pipelines/common/platform-matrix.yml
parameters:
- jobTemplate: /eng/pipelines/mono/templates/generate-offsets.yml
+ jobTemplate: /eng/pipelines/common/global-build-job.yml
buildConfig: release
platforms:
- android_x64
@@ -394,9 +395,31 @@ extends:
- tvos_arm64
- ios_arm64
- maccatalyst_x64
+ variables:
+ - name: _osParameter
+ value: -os linux
+ - name: _archParameter
+ value: -arch x64
jobParameters:
templatePath: 'templates-official'
- isOfficialBuild: ${{ variables.isOfficialBuild }}
+ nameSuffix: MonoAOTOffsets
+ buildArgs: -s mono.aotcross -c $(_BuildConfig) /p:MonoGenerateOffsetsOSGroups=$(osGroup)
+ postBuildSteps:
+ # Upload offset files
+ - task: CopyFiles@2
+ displayName: Collect offset files
+ inputs:
+ sourceFolder: '$(Build.SourcesDirectory)/artifacts/obj/mono/'
+ contents: '**/offsets-*.h'
+ targetFolder: '$(Build.SourcesDirectory)/artifacts/obj/mono/offsetfiles/'
+
+ templateContext:
+ outputs:
+ - output: pipelineArtifact
+ displayName: 'Publish Mono AOT offsets'
+ condition: succeeded()
+ targetPath: '$(Build.SourcesDirectory)/artifacts/obj/mono/offsetfiles'
+ artifactName: 'Mono_Offsets_$(osGroup)$(osSubGroup)'
#
# Build Mono release AOT cross-compilers
@@ -418,9 +441,9 @@ extends:
nameSuffix: CrossAOT_Mono
runtimeVariant: crossaot
dependsOn:
- - mono_android_offsets
- - mono_browser_offsets
- - mono_wasi_offsets
+ - build_android_x64_release_MonoAOTOffsets
+ - build_browser_wasm_linux_release_MonoAOTOffsets
+ - build_wasi_wasm_linux_release_MonoAOTOffsets
monoCrossAOTTargetOS:
- android
- browser
@@ -446,9 +469,9 @@ extends:
nameSuffix: CrossAOT_Mono
runtimeVariant: crossaot
dependsOn:
- - mono_android_offsets
- - mono_browser_offsets
- - mono_wasi_offsets
+ - build_android_x64_release_MonoAOTOffsets
+ - build_browser_wasm_linux_release_MonoAOTOffsets
+ - build_wasi_wasm_linux_release_MonoAOTOffsets
monoCrossAOTTargetOS:
- android
- browser
@@ -474,12 +497,12 @@ extends:
nameSuffix: CrossAOT_Mono
runtimeVariant: crossaot
dependsOn:
- - mono_android_offsets
- - mono_browser_offsets
- - mono_wasi_offsets
- - mono_tvos_offsets
- - mono_ios_offsets
- - mono_maccatalyst_offsets
+ - build_android_x64_release_MonoAOTOffsets
+ - build_browser_wasm_linux_release_MonoAOTOffsets
+ - build_wasi_wasm_linux_release_MonoAOTOffsets
+ - build_tvos_arm64_release_MonoAOTOffsets
+ - build_ios_arm64_release_MonoAOTOffsets
+ - build_maccatalyst_x64_release_MonoAOTOffsets
monoCrossAOTTargetOS:
- android
- browser
@@ -599,12 +622,80 @@ extends:
#
- template: /eng/pipelines/common/platform-matrix.yml
parameters:
- jobTemplate: /eng/pipelines/mono/templates/workloads-build.yml
+ jobTemplate: /eng/pipelines/common/global-build-job.yml
buildConfig: release
platforms:
- windows_x64
jobParameters:
templatePath: 'templates-official'
+ nameSuffix: Workloads
+ preBuildSteps:
+ - task: DownloadPipelineArtifact@2
+ inputs:
+ artifact: 'IntermediateArtifacts'
+ path: $(Build.SourcesDirectory)/artifacts/workloadPackages
+ patterns: |
+ IntermediateArtifacts/MonoRuntimePacks/Shipping/Microsoft.NETCore.App.Runtime.AOT.win-x64.Cross.android-*.nupkg
+ IntermediateArtifacts/MonoRuntimePacks/Shipping/Microsoft.NETCore.App.Runtime.AOT.win-arm64.Cross.android-*.nupkg
+ IntermediateArtifacts/MonoRuntimePacks/Shipping/Microsoft.NETCore.App.Runtime.AOT.win-x64.Cross.browser-wasm*.nupkg
+ IntermediateArtifacts/MonoRuntimePacks/Shipping/Microsoft.NETCore.App.Runtime.AOT.win-arm64.Cross.browser-wasm*.nupkg
+ IntermediateArtifacts/MonoRuntimePacks/Shipping/Microsoft.NETCore.App.Runtime.AOT.win-x64.Cross.wasi-wasm*.nupkg
+ IntermediateArtifacts/MonoRuntimePacks/Shipping/Microsoft.NETCore.App.Runtime.AOT.win-arm64.Cross.wasi-wasm*.nupkg
+ IntermediateArtifacts/MonoRuntimePacks/Shipping/Microsoft.NETCore.App.Runtime.Mono.android-*.nupkg
+ IntermediateArtifacts/MonoRuntimePacks/Shipping/Microsoft.NETCore.App.Runtime.Mono.browser-wasm*.nupkg
+ IntermediateArtifacts/MonoRuntimePacks/Shipping/Microsoft.NETCore.App.Runtime.Mono.multithread.browser-wasm*.nupkg
+ IntermediateArtifacts/MonoRuntimePacks/Shipping/Microsoft.NETCore.App.Runtime.Mono.ios-*.nupkg
+ IntermediateArtifacts/MonoRuntimePacks/Shipping/Microsoft.NETCore.App.Runtime.Mono.iossimulator-*.nupkg
+ IntermediateArtifacts/MonoRuntimePacks/Shipping/Microsoft.NETCore.App.Runtime.Mono.maccatalyst-*.nupkg
+ IntermediateArtifacts/MonoRuntimePacks/Shipping/Microsoft.NETCore.App.Runtime.Mono.multithread.browser-wasm*.nupkg
+ IntermediateArtifacts/MonoRuntimePacks/Shipping/Microsoft.NETCore.App.Runtime.Mono.tvos-*.nupkg
+ IntermediateArtifacts/MonoRuntimePacks/Shipping/Microsoft.NETCore.App.Runtime.Mono.tvossimulator-*.nupkg
+ IntermediateArtifacts/MonoRuntimePacks/Shipping/Microsoft.NETCore.App.Runtime.Mono.wasi-wasm*.nupkg
+ IntermediateArtifacts/MonoRuntimePacks/Shipping/Microsoft.NET.Workload.Mono.ToolChain.Current.Manifest*.nupkg
+ IntermediateArtifacts/MonoRuntimePacks/Shipping/Microsoft.NET.Workload.Mono.ToolChain.net6.Manifest*.nupkg
+ IntermediateArtifacts/MonoRuntimePacks/Shipping/Microsoft.NET.Workload.Mono.ToolChain.net7.Manifest*.nupkg
+ IntermediateArtifacts/MonoRuntimePacks/Shipping/Microsoft.NET.Workload.Mono.ToolChain.net8.Manifest*.nupkg
+ IntermediateArtifacts/MonoRuntimePacks/Shipping/Microsoft.NET.Runtime.MonoTargets.Sdk*.nupkg
+ IntermediateArtifacts/MonoRuntimePacks/Shipping/Microsoft.NET.Runtime.MonoAOTCompiler.Task*.nupkg
+ IntermediateArtifacts/MonoRuntimePacks/Shipping/Microsoft.NET.Runtime.WebAssembly.Sdk*.nupkg
+ IntermediateArtifacts/MonoRuntimePacks/Shipping/Microsoft.NET.Runtime.WebAssembly.Wasi*.nupkg
+ IntermediateArtifacts/MonoRuntimePacks/Shipping/Microsoft.NET.Runtime.WebAssembly.Templates*.nupkg
+ IntermediateArtifacts/windows_arm64/Shipping/Microsoft.NETCore.App.Runtime.win-arm64*.nupkg
+ IntermediateArtifacts/windows_x64/Shipping/Microsoft.NETCore.App.Runtime.win-x64*.nupkg
+ IntermediateArtifacts/windows_x86/Shipping/Microsoft.NETCore.App.Runtime.win-x86*.nupkg
+ IntermediateArtifacts/MonoRuntimePacks/Shipping/Microsoft.NET.Sdk.WebAssembly.Pack*.nupkg
+
+ - task: CopyFiles@2
+ displayName: Flatten packages
+ inputs:
+ sourceFolder: $(Build.SourcesDirectory)/artifacts/workloadPackages
+ contents: '*/Shipping/*.nupkg'
+ cleanTargetFolder: false
+ targetFolder: $(Build.SourcesDirectory)/artifacts/workloadPackages
+ flattenFolders: true
+
+ buildArgs: -s mono.workloads -c $(_BuildConfig) /p:PackageSource=$(Build.SourcesDirectory)/artifacts/workloadPackages /p:WorkloadOutputPath=$(Build.SourcesDirectory)/artifacts/workloads
+
+ postBuildSteps:
+ # Upload packages wrapping msis
+ - template: /eng/pipelines/common/upload-intermediate-artifacts-step.yml
+ parameters:
+ name: workloads
+
+ # Delete wixpdb files before they are uploaded to artifacts
+ - task: DeleteFiles@1
+ displayName: Delete wixpdb's
+ inputs:
+ SourceFolder: $(Build.SourcesDirectory)/artifacts/workloads
+ Contents: '*.wixpdb'
+
+ # Upload artifacts to be used for generating VS components
+ - template: /eng/pipelines/common/upload-intermediate-artifacts-step.yml
+ parameters:
+ name: workloads-vs
+ publishPackagesCondition: false
+ publishVSSetupCondition: always()
+
isOfficialBuild: ${{ variables.isOfficialBuild }}
timeoutInMinutes: 120
dependsOn:
diff --git a/eng/pipelines/runtime-sanitized.yml b/eng/pipelines/runtime-sanitized.yml
index ed0334b61b3e9d..df461c03b7a70a 100644
--- a/eng/pipelines/runtime-sanitized.yml
+++ b/eng/pipelines/runtime-sanitized.yml
@@ -133,7 +133,7 @@ extends:
testGroup: innerloop
timeoutInMinutes: 120
nameSuffix: NativeAOT
- buildArgs: -s clr.aot+host.native+libs -rc $(_BuildConfig) -lc Release -hc Release $(_nativeSanitizersArg)
+ buildArgs: -s clr.aot+libs -rc $(_BuildConfig) -lc Release $(_nativeSanitizersArg)
postBuildSteps:
- template: /eng/pipelines/coreclr/nativeaot-post-build-steps.yml
parameters:
diff --git a/eng/pipelines/runtime.yml b/eng/pipelines/runtime.yml
index b4ed2afd252d23..cb7aadde9fab6d 100644
--- a/eng/pipelines/runtime.yml
+++ b/eng/pipelines/runtime.yml
@@ -498,7 +498,7 @@ extends:
jobParameters:
testScope: innerloop
nameSuffix: CoreCLR_NonPortable
- buildArgs: -s clr.native+clr.tools+clr.corelib+clr.nativecorelib+clr.aot+clr.packages -c $(_BuildConfig) /p:PortableBuild=false
+ buildArgs: -s clr.native+clr.tools+clr.corelib+clr.nativecorelib+clr.aot+clr.packages --outputrid tizen.9.0.0-armel -c $(_BuildConfig) /p:PortableBuild=false
timeoutInMinutes: 120
condition: >-
or(
@@ -567,7 +567,7 @@ extends:
jobParameters:
timeoutInMinutes: 120
nameSuffix: NativeAOT
- buildArgs: -s clr.aot+host.native+libs -rc $(_BuildConfig) -lc Release -hc Release /p:RunAnalyzers=false
+ buildArgs: -s clr.aot+libs -rc $(_BuildConfig) -lc Release /p:RunAnalyzers=false
postBuildSteps:
- template: /eng/pipelines/coreclr/nativeaot-post-build-steps.yml
parameters:
@@ -606,7 +606,7 @@ extends:
jobParameters:
timeoutInMinutes: 180
nameSuffix: NativeAOT
- buildArgs: -s clr.aot+host.native+libs.native+libs.sfx -rc $(_BuildConfig) -lc Release -hc Release /p:RunAnalyzers=false
+ buildArgs: -s clr.aot+libs.native+libs.sfx -rc $(_BuildConfig) -lc Release /p:RunAnalyzers=false
postBuildSteps:
- template: /eng/pipelines/coreclr/nativeaot-post-build-steps.yml
parameters:
@@ -651,7 +651,7 @@ extends:
testGroup: innerloop
timeoutInMinutes: 120
nameSuffix: NativeAOT
- buildArgs: -s clr.aot+host.native+libs+tools.illink -c $(_BuildConfig) -rc $(_BuildConfig) -lc Release -hc Release /p:RunAnalyzers=false
+ buildArgs: -s clr.aot+libs+tools.illink -c $(_BuildConfig) -rc $(_BuildConfig) -lc Release /p:RunAnalyzers=false
postBuildSteps:
- template: /eng/pipelines/coreclr/nativeaot-post-build-steps.yml
parameters:
@@ -688,7 +688,7 @@ extends:
testGroup: innerloop
isSingleFile: true
nameSuffix: NativeAOT_Libraries
- buildArgs: -s clr.aot+host.native+libs+libs.tests -c $(_BuildConfig) /p:TestNativeAot=true /p:RunSmokeTestsOnly=true /p:ArchiveTests=true /p:RunAnalyzers=false
+ buildArgs: -s clr.aot+libs+libs.tests -c $(_BuildConfig) /p:TestNativeAot=true /p:RunSmokeTestsOnly=true /p:ArchiveTests=true /p:RunAnalyzers=false
timeoutInMinutes: 240 # Doesn't actually take long, but we've seen the ARM64 Helix queue often get backlogged for 2+ hours
# extra steps, run tests
postBuildSteps:
@@ -713,7 +713,7 @@ extends:
jobParameters:
timeoutInMinutes: 120
nameSuffix: CLR_Tools_Tests
- buildArgs: -s clr.aot+clr.iltools+libs.sfx+clr.toolstests -c $(_BuildConfig) -test
+ buildArgs: -s clr.aot+clr.iltools+libs.sfx+clr.toolstests+tools.cdacreadertests -c $(_BuildConfig) -test
enablePublishTestResults: true
testResultsFormat: 'xunit'
# We want to run AOT tests when illink changes because there's share code and tests from illink which are used by AOT
@@ -721,6 +721,7 @@ extends:
or(
eq(stageDependencies.EvaluatePaths.evaluate_paths.outputs['SetPathVars_coreclr.containsChange'], true),
eq(stageDependencies.EvaluatePaths.evaluate_paths.outputs['SetPathVars_tools_illink.containsChange'], true),
+ eq(stageDependencies.EvaluatePaths.evaluate_paths.outputs['SetPathVars_tools_cdacreader.containsChange'], true),
eq(variables['isRollingBuild'], true))
#
# Build CrossDacs
@@ -753,8 +754,7 @@ extends:
#
- template: /eng/pipelines/common/platform-matrix.yml
parameters:
- jobTemplate: /eng/pipelines/mono/templates/generate-offsets.yml
- templatePath: 'templates'
+ jobTemplate: /eng/pipelines/common/global-build-job.yml
buildConfig: release
platforms:
- android_x64
@@ -763,8 +763,26 @@ extends:
- tvos_arm64
- ios_arm64
- maccatalyst_x64
+ variables:
+ - name: _osParameter
+ value: -os linux
+ - name: _archParameter
+ value: -arch x64
jobParameters:
- isOfficialBuild: false
+ nameSuffix: MonoAOTOffsets
+ buildArgs: -s mono.aotcross -c $(_BuildConfig) /p:MonoGenerateOffsetsOSGroups=$(osGroup)
+ postBuildSteps:
+ # Upload offset files
+ - task: CopyFiles@2
+ displayName: Collect offset files
+ inputs:
+ sourceFolder: '$(Build.SourcesDirectory)/artifacts/obj/mono/'
+ contents: '**/offsets-*.h'
+ targetFolder: '$(Build.SourcesDirectory)/artifacts/obj/mono/offsetfiles/'
+
+ - publish: '$(Build.SourcesDirectory)/artifacts/obj/mono/offsetfiles'
+ artifact: Mono_Offsets_$(osGroup)$(osSubGroup)
+ displayName: Upload offset files
# needed by crossaot
condition: >-
or(
@@ -825,6 +843,7 @@ extends:
scenarios:
- WasmTestOnV8
- WasmTestOnChrome
+ - WasmTestOnFirefox
- template: /eng/pipelines/common/templates/wasm-library-tests.yml
parameters:
@@ -843,10 +862,12 @@ extends:
#- browser_wasm_win
nameSuffix: _Threading
extraBuildArgs: /p:WasmEnableThreads=true /p:AotHostArchitecture=x64 /p:AotHostOS=$(_hostedOS)
+ extraHelixArguments: /p:WasmEnableThreads=true
alwaysRun: ${{ variables.isRollingBuild }}
shouldRunSmokeOnly: onLibrariesAndIllinkChanges
scenarios:
- WasmTestOnChrome
+ - WasmTestOnFirefox
#- WasmTestOnNodeJS - this is not supported yet, https://github.com/dotnet/runtime/issues/85592
# EAT Library tests - only run on linux
@@ -867,7 +888,6 @@ extends:
- browser_wasm
- browser_wasm_win
- wasi_wasm
- - wasi_wasm_win
nameSuffix: _Smoke_AOT
runAOT: true
shouldRunSmokeOnly: true
@@ -1245,9 +1265,9 @@ extends:
nameSuffix: CrossAOT_Mono
runtimeVariant: crossaot
dependsOn:
- - mono_android_offsets
- - mono_browser_offsets
- - mono_wasi_offsets
+ - build_android_x64_release_MonoAOTOffsets
+ - build_browser_wasm_linux_release_MonoAOTOffsets
+ - build_wasi_wasm_linux_release_MonoAOTOffsets
monoCrossAOTTargetOS:
- android
- browser
@@ -1272,9 +1292,9 @@ extends:
nameSuffix: CrossAOT_Mono
runtimeVariant: crossaot
dependsOn:
- - mono_android_offsets
- - mono_browser_offsets
- - mono_wasi_offsets
+ - build_android_x64_release_MonoAOTOffsets
+ - build_browser_wasm_linux_release_MonoAOTOffsets
+ - build_wasi_wasm_linux_release_MonoAOTOffsets
monoCrossAOTTargetOS:
- android
- browser
@@ -1303,12 +1323,12 @@ extends:
nameSuffix: CrossAOT_Mono
runtimeVariant: crossaot
dependsOn:
- - mono_android_offsets
- - mono_browser_offsets
- - mono_wasi_offsets
- - mono_tvos_offsets
- - mono_ios_offsets
- - mono_maccatalyst_offsets
+ - build_android_x64_release_MonoAOTOffsets
+ - build_browser_wasm_linux_release_MonoAOTOffsets
+ - build_wasi_wasm_linux_release_MonoAOTOffsets
+ - build_tvos_arm64_release_MonoAOTOffsets
+ - build_ios_arm64_release_MonoAOTOffsets
+ - build_maccatalyst_x64_release_MonoAOTOffsets
monoCrossAOTTargetOS:
- android
- browser
diff --git a/eng/pipelines/runtimelab-official.yml b/eng/pipelines/runtimelab-official.yml
new file mode 100644
index 00000000000000..84a8bd69258d5c
--- /dev/null
+++ b/eng/pipelines/runtimelab-official.yml
@@ -0,0 +1,78 @@
+trigger:
+ batch: true
+ branches:
+ include:
+ - feature/*
+ paths:
+ include:
+ - '*'
+ exclude:
+ - '**.md'
+ - eng/Version.Details.xml
+ - .devcontainer/*
+ - .github/*
+ - docs/*
+ - LICENSE.TXT
+ - PATENTS.TXT
+ - THIRD-PARTY-NOTICES.TXT
+
+variables:
+- template: /eng/pipelines/common/variables.yml
+ parameters:
+ templatePath: 'templates-official'
+
+- ${{ if and(ne(variables['System.TeamProject'], 'public'), ne(variables['Build.Reason'], 'PullRequest')) }}:
+ - name: TeamName
+ value: dotnet-core
+extends:
+ template: /eng/pipelines/common/templates/pipeline-with-resources.yml@self
+ parameters:
+ isOfficialBuild: true
+ stages:
+ - stage: Build
+ jobs:
+ #
+ # Build the whole product with Release CoreCLR
+ #
+ - template: /eng/pipelines/common/platform-matrix.yml
+ parameters:
+ jobTemplate: /eng/pipelines/common/global-build-job.yml
+ helixQueuesTemplate: /eng/pipelines/libraries/helix-queues-setup.yml
+ buildConfig: release
+ platforms:
+ - linux_x64
+ - windows_x64
+ jobParameters:
+ templatePath: 'templates-official'
+ isOfficialBuild: true
+ timeoutInMinutes: 180
+ buildArgs: -s clr+libs+hosts+packs -c $(_BuildConfig)
+ postBuildSteps:
+ # Upload the results.
+ - template: /eng/pipelines/common/upload-intermediate-artifacts-step.yml
+ parameters:
+ name: $(osGroup)$(osSubgroup)_$(archType)
+
+ #
+ # Build libraries AllConfigurations for packages
+ #
+ - template: /eng/pipelines/common/platform-matrix.yml
+ parameters:
+ jobTemplate: /eng/pipelines/common/global-build-job.yml
+ buildConfig: Release
+ platforms:
+ - windows_x64
+ jobParameters:
+ templatePath: 'templates-official'
+ buildArgs: -s tools+libs -allConfigurations -c $(_BuildConfig) /p:TestAssemblies=false /p:TestPackages=true
+ nameSuffix: Libraries_AllConfigurations
+ isOfficialBuild: true
+ postBuildSteps:
+ - template: /eng/pipelines/common/upload-intermediate-artifacts-step.yml
+ parameters:
+ name: Libraries_AllConfigurations
+ timeoutInMinutes: 95
+
+ - template: /eng/pipelines/official/stages/publish.yml
+ parameters:
+ isOfficialBuild: true
diff --git a/eng/pipelines/runtimelab.yml b/eng/pipelines/runtimelab.yml
index 0fe01fd2816f73..7e0aaeab207216 100644
--- a/eng/pipelines/runtimelab.yml
+++ b/eng/pipelines/runtimelab.yml
@@ -49,103 +49,56 @@ extends:
stages:
- stage: Build
jobs:
- - ${{ if ne(variables.isOfficialBuild, true) }}:
- #
- # Build the whole product with Checked CoreCLR and run runtime tests
- #
- - template: /eng/pipelines/common/platform-matrix.yml
- parameters:
- jobTemplate: /eng/pipelines/common/global-build-job.yml
- helixQueuesTemplate: /eng/pipelines/coreclr/templates/helix-queues-setup.yml
- buildConfig: checked
- platforms:
- - linux_x64
- - windows_x64
- jobParameters:
- timeoutInMinutes: 200
- buildArgs: -s clr+libs+hosts+packs -c debug -rc $(_BuildConfig)
- postBuildSteps:
- - template: /eng/pipelines/common/templates/runtimes/build-runtime-tests-and-send-to-helix.yml
- parameters:
- creator: dotnet-bot
- testRunNamePrefixSuffix: CoreCLR_$(_BuildConfig)
+ #
+ # Build the whole product with Checked CoreCLR and run runtime tests
+ #
+ - template: /eng/pipelines/common/platform-matrix.yml
+ parameters:
+ jobTemplate: /eng/pipelines/common/global-build-job.yml
+ helixQueuesTemplate: /eng/pipelines/coreclr/templates/helix-queues-setup.yml
+ buildConfig: checked
+ platforms:
+ - linux_x64
+ - windows_x64
+ jobParameters:
+ timeoutInMinutes: 200
+ buildArgs: -s clr+libs+hosts+packs -c debug -rc $(_BuildConfig)
+ postBuildSteps:
+ - template: /eng/pipelines/common/templates/runtimes/build-runtime-tests-and-send-to-helix.yml
+ parameters:
+ creator: dotnet-bot
+ testRunNamePrefixSuffix: CoreCLR_$(_BuildConfig)
- #
- # Build the whole product with Release CoreCLR and run libraries tests
- #
- - template: /eng/pipelines/common/platform-matrix.yml
- parameters:
- jobTemplate: /eng/pipelines/common/global-build-job.yml
- helixQueuesTemplate: /eng/pipelines/libraries/helix-queues-setup.yml
- buildConfig: release
- platforms:
- - linux_x64
- - windows_x64
- jobParameters:
- timeoutInMinutes: 180
- buildArgs: -s clr+libs+libs.tests+hosts+packs -c $(_BuildConfig) /p:ArchiveTests=true
- postBuildSteps:
- - template: /eng/pipelines/libraries/helix.yml
- parameters:
- creator: dotnet-bot
- testRunNamePrefixSuffix: Libraries_$(_BuildConfig)
+ #
+ # Build the whole product with Release CoreCLR and run libraries tests
+ #
+ - template: /eng/pipelines/common/platform-matrix.yml
+ parameters:
+ jobTemplate: /eng/pipelines/common/global-build-job.yml
+ helixQueuesTemplate: /eng/pipelines/libraries/helix-queues-setup.yml
+ buildConfig: release
+ platforms:
+ - linux_x64
+ - windows_x64
+ jobParameters:
+ timeoutInMinutes: 180
+ buildArgs: -s clr+libs+libs.tests+hosts+packs -c $(_BuildConfig) /p:ArchiveTests=true
+ postBuildSteps:
+ - template: /eng/pipelines/libraries/helix.yml
+ parameters:
+ creator: dotnet-bot
+ testRunNamePrefixSuffix: Libraries_$(_BuildConfig)
- #
- # Build and test libraries AllConfigurations
- #
- - template: /eng/pipelines/common/platform-matrix.yml
- parameters:
- jobTemplate: /eng/pipelines/common/global-build-job.yml
- buildConfig: ${{ variables.debugOnPrReleaseOnRolling }}
- platforms:
- - windows_x64
- jobParameters:
- buildArgs: -test -s tools+libs+libs.tests -allConfigurations -c $(_BuildConfig) /p:TestAssemblies=false /p:TestPackages=true
- nameSuffix: Libraries_AllConfigurations
- timeoutInMinutes: 150
-
- - ${{ else }}:
- #
- # Build the whole product with Release CoreCLR
- #
- - template: /eng/pipelines/common/platform-matrix.yml
- parameters:
- jobTemplate: /eng/pipelines/common/global-build-job.yml
- helixQueuesTemplate: /eng/pipelines/libraries/helix-queues-setup.yml
- buildConfig: release
- platforms:
- - linux_x64
- - windows_x64
- jobParameters:
- isOfficialBuild: true
- timeoutInMinutes: 180
- buildArgs: -s clr+libs+hosts+packs -c $(_BuildConfig)
- postBuildSteps:
- # Upload the results.
- - template: /eng/pipelines/common/upload-intermediate-artifacts-step.yml
- parameters:
- name: $(osGroup)$(osSubgroup)_$(archType)
-
- #
- # Build libraries AllConfigurations for packages
- #
- - template: /eng/pipelines/common/platform-matrix.yml
- parameters:
- jobTemplate: /eng/pipelines/common/global-build-job.yml
- buildConfig: Release
- platforms:
- - windows_x64
- jobParameters:
- buildArgs: -s tools+libs -allConfigurations -c $(_BuildConfig) /p:TestAssemblies=false /p:TestPackages=true
- nameSuffix: Libraries_AllConfigurations
- isOfficialBuild: true
- postBuildSteps:
- - template: /eng/pipelines/common/upload-intermediate-artifacts-step.yml
- parameters:
- name: Libraries_AllConfigurations
- timeoutInMinutes: 95
-
- - ${{ if eq(variables.isOfficialBuild, true) }}:
- - template: /eng/pipelines/official/stages/publish.yml
- parameters:
- isOfficialBuild: true
\ No newline at end of file
+ #
+ # Build and test libraries AllConfigurations
+ #
+ - template: /eng/pipelines/common/platform-matrix.yml
+ parameters:
+ jobTemplate: /eng/pipelines/common/global-build-job.yml
+ buildConfig: ${{ variables.debugOnPrReleaseOnRolling }}
+ platforms:
+ - windows_x64
+ jobParameters:
+ buildArgs: -test -s tools+libs+libs.tests -allConfigurations -c $(_BuildConfig) /p:TestAssemblies=false /p:TestPackages=true
+ nameSuffix: Libraries_AllConfigurations
+ timeoutInMinutes: 150
diff --git a/eng/testing/BrowserVersions.props b/eng/testing/BrowserVersions.props
index b1e85302a8ed3a..c66b61a6e44042 100644
--- a/eng/testing/BrowserVersions.props
+++ b/eng/testing/BrowserVersions.props
@@ -8,7 +8,9 @@
1250580
https://storage.googleapis.com/chromium-browser-snapshots/Win_x64/1250586
12.3.219
- 124.0.2
+ 125.0.1
0.34.0
+ 125.0.1
+ 0.34.0
diff --git a/eng/testing/WasmRunnerTemplate.cmd b/eng/testing/WasmRunnerTemplate.cmd
index 0c7f3dc2195d26..e043ce7a34e9f5 100644
--- a/eng/testing/WasmRunnerTemplate.cmd
+++ b/eng/testing/WasmRunnerTemplate.cmd
@@ -30,7 +30,11 @@ if [%XHARNESS_COMMAND%] == [] (
if /I [%SCENARIO%]==[WasmTestOnChrome] (
set XHARNESS_COMMAND=test-browser
) else (
- set XHARNESS_COMMAND=test
+ if /I [%SCENARIO%]==[WasmTestOnFirefox] (
+ set XHARNESS_COMMAND=test-browser
+ ) else (
+ set XHARNESS_COMMAND=test
+ )
)
)
@@ -56,11 +60,25 @@ if /I [%XHARNESS_COMMAND%] == [test] (
)
)
) else (
- if [%BROWSER_PATH%] == [] if not [%HELIX_CORRELATION_PAYLOAD%] == [] (
- set "BROWSER_PATH=--browser-path^=%HELIX_CORRELATION_PAYLOAD%\chrome-win\chrome.exe"
- )
- if [%JS_ENGINE_ARGS%] == [] (
- set "JS_ENGINE_ARGS=--browser-arg^=--js-flags^=--stack-trace-limit^=1000"
+ if /I [%SCENARIO%] == [WasmTestOnChrome] (
+ if [%BROWSER_PATH%] == [] if not [%HELIX_CORRELATION_PAYLOAD%] == [] (
+ set "BROWSER_PATH=--browser-path^=%HELIX_CORRELATION_PAYLOAD%\chrome-win\chrome.exe"
+ )
+ if [%JS_ENGINE_ARGS%] == [] (
+ set "JS_ENGINE_ARGS=--browser-arg^=--js-flags^=--stack-trace-limit^=1000"
+ )
+ ) else (
+ if /I [%SCENARIO%] == [WasmTestOnFirefox] (
+ if [%BROWSER_PATH%] == [] if not [%HELIX_CORRELATION_PAYLOAD%] == [] (
+ set "BROWSER_PATH=--browser-path^=%HELIX_CORRELATION_PAYLOAD%\firefox\firefox.exe"
+ )
+ if [%JS_ENGINE%] == [] (
+ set "JS_ENGINE=--browser^=Firefox"
+ )
+ if [%JS_ENGINE_ARGS%] == [] (
+ set "JS_ENGINE_ARGS=--browser-arg^=-private-window"
+ )
+ )
)
)
diff --git a/eng/testing/WasmRunnerTemplate.sh b/eng/testing/WasmRunnerTemplate.sh
index bd7f1faadf3556..6cf4dc11d4beae 100644
--- a/eng/testing/WasmRunnerTemplate.sh
+++ b/eng/testing/WasmRunnerTemplate.sh
@@ -26,7 +26,9 @@ else
fi
if [[ -z "$XHARNESS_COMMAND" ]]; then
- if [[ "$SCENARIO" == "WasmTestOnChrome" || "$SCENARIO" == "wasmtestonchrome" ]]; then
+ if [[ "$SCENARIO" == "WasmTestOnFirefox" || "$SCENARIO" == "wasmtestonfirefox" ]]; then
+ XHARNESS_COMMAND="test-browser"
+ elif [[ "$SCENARIO" == "WasmTestOnChrome" || "$SCENARIO" == "wasmtestonchrome" ]]; then
XHARNESS_COMMAND="test-browser"
else
XHARNESS_COMMAND="test"
@@ -59,8 +61,17 @@ if [[ "$XHARNESS_COMMAND" == "test" ]]; then
fi
fi
else
- if [[ -z "$JS_ENGINE_ARGS" ]]; then
- JS_ENGINE_ARGS="--browser-arg=--js-flags=--stack-trace-limit=1000"
+ if [[ "$SCENARIO" == "WasmTestOnChrome" || "$SCENARIO" == "wasmtestonchrome" ]]; then
+ if [[ -z "$JS_ENGINE_ARGS" ]]; then
+ JS_ENGINE_ARGS="--browser-arg=--js-flags=--stack-trace-limit=1000"
+ fi
+ elif [[ "$SCENARIO" == "WasmTestOnFirefox" || "$SCENARIO" == "wasmtestonfirefox" ]]; then
+ if [[ -z "$JS_ENGINE" ]]; then
+ JS_ENGINE="--browser=Firefox"
+ fi
+ if [[ -z "$JS_ENGINE_ARGS" ]]; then
+ JS_ENGINE_ARGS="--browser-arg=-private-window"
+ fi
fi
fi
diff --git a/eng/testing/linker/SupportFiles/Directory.Build.props b/eng/testing/linker/SupportFiles/Directory.Build.props
index 5a54c83e569231..4e33801ab12837 100644
--- a/eng/testing/linker/SupportFiles/Directory.Build.props
+++ b/eng/testing/linker/SupportFiles/Directory.Build.props
@@ -11,6 +11,7 @@
false
true
+ true
$(NoWarn);IL2121
diff --git a/eng/testing/linker/trimmingTests.props b/eng/testing/linker/trimmingTests.props
index b917cd5fe38268..b822294a93d04c 100644
--- a/eng/testing/linker/trimmingTests.props
+++ b/eng/testing/linker/trimmingTests.props
@@ -1,6 +1,8 @@
- $([MSBuild]::NormalizeDirectory('$(ArtifactsBinDir)', 'trimmingTests'))
+ $([MSBuild]::NormalizeDirectory('$(ArtifactsBinDir)', 'trimmingTests'))
+ $([MSBuild]::NormalizeDirectory('$(ArtifactsBinDir)', 'aotTests'))
+
$([MSBuild]::NormalizeDirectory('$(TrimmingTestDir)', 'projects'))
$(MSBuildThisFileDirectory)project.csproj.template
true
diff --git a/eng/testing/linker/trimmingTests.targets b/eng/testing/linker/trimmingTests.targets
index 926bafa52cfef4..d474e19b69accd 100644
--- a/eng/testing/linker/trimmingTests.targets
+++ b/eng/testing/linker/trimmingTests.targets
@@ -38,7 +38,13 @@
-
+ <_SkippedAppSourceFiles Include="@(TestConsoleAppSourceFiles)" Condition="$([System.String]::Copy('%(TestConsoleAppSourceFiles.SkipOnTestRuntimes)').Contains('$(PackageRID)'))" />
+
+ <_SkippedAppSourceFiles Include="@(TestConsoleAppSourceFiles)" Condition="'$(RunNativeAotTestApps)' == 'true' and '%(TestConsoleAppSourceFiles.NativeAotIncompatible)' == 'true'" />
+
+ <_AppSourceFiles Include="@(TestConsoleAppSourceFiles)" Exclude="@(_SkippedAppSourceFiles)" />
+
+
%(FullPath)
@@ -81,6 +87,9 @@
<_additionalPropertiesString>@(_propertiesAsItems->'<%(Identity)>%(Value)</%(Identity)>', '%0a ')
+
+
+
+ Properties="Configuration=$(Configuration);BuildProjectReferences=false;TargetOS=$(TargetOS);TargetArchitecture=$(TargetArchitecture);_IsPublishing=true" />
+ WasmTestOnV8
@(_NodeNpmModuleStringTrimmed, ',')
+
+
diff --git a/eng/testing/tests.wasi.targets b/eng/testing/tests.wasi.targets
index d147fea218fe4a..e8f75ee3821bc5 100644
--- a/eng/testing/tests.wasi.targets
+++ b/eng/testing/tests.wasi.targets
@@ -46,7 +46,7 @@
<_XHarnessArgs Condition="'$(IsFunctionalTest)' == 'true'" >$(_XHarnessArgs) --expected-exit-code=$(ExpectedExitCode)
<_XHarnessArgs Condition="'$(WasmXHarnessArgs)' != ''" >$(_XHarnessArgs) $(WasmXHarnessArgs)
<_XHarnessArgs Condition="'$(WasmXHarnessTestsTimeout)' != ''" >$(_XHarnessArgs) "--timeout=$(WasmXHarnessTestsTimeout)"
- <_XHarnessArgs >$(_XHarnessArgs) --engine-arg=--max-wasm-stack=134217728
+ <_XHarnessArgs >$(_XHarnessArgs) --engine-arg=-W --engine-arg=max-wasm-stack=134217728
<_XHarnessArgs Condition="'$(WasmXHarnessArgsCli)' != ''" >$(_XHarnessArgs) $(WasmXHarnessArgsCli)
<_InvariantGlobalization Condition="'$(InvariantGlobalization)' == 'true'">--env=DOTNET_SYSTEM_GLOBALIZATION_INVARIANT=true
@@ -147,6 +147,13 @@
+
+ WasmTestOnWasmtime
+
+
+
diff --git a/eng/testing/wasm-provisioning.targets b/eng/testing/wasm-provisioning.targets
index 47076a23770dd3..540fcfb035fa21 100644
--- a/eng/testing/wasm-provisioning.targets
+++ b/eng/testing/wasm-provisioning.targets
@@ -21,18 +21,30 @@
-
- https://ftp.mozilla.org/pub/firefox/releases/$(linux_FirefoxRevision)/linux-x86_64/en-US/firefox-$(linux_FirefoxRevision).tar.bz2
- https://github.com/mozilla/geckodriver/releases/download/v$(linux_GeckoDriverRevision)/geckodriver-v$(linux_GeckoDriverRevision)-linux64.tar.gz
- firefox
- geckodriver
+
$(ArtifactsBinDir)firefox\
firefox
geckodriver
$(ArtifactsBinDir)geckodriver\
+
+
+ https://ftp.mozilla.org/pub/firefox/releases/$(linux_FirefoxRevision)/linux-x86_64/en-US/firefox-$(linux_FirefoxRevision).tar.bz2
+ https://github.com/mozilla/geckodriver/releases/download/v$(linux_GeckoDriverRevision)/geckodriver-v$(linux_GeckoDriverRevision)-linux64.tar.gz
+ firefox
+ geckodriver
$([MSBuild]::NormalizePath($(FirefoxDir), '.install-firefox-$(linux_FirefoxRevision).stamp'))
$([MSBuild]::NormalizePath($(GeckoDriverDir), '.install-geckodriver-$(linux_GeckoDriverRevision).stamp'))
+ $([MSBuild]::NormalizePath($(FirefoxDir), $(FirefoxDirName), $(FirefoxBinaryName)))
+ $([MSBuild]::NormalizePath($(GeckoDriverDir), $(GeckoDriverDirName), $(GeckoDriverBinaryName)))
+
+
+ https://ftp.mozilla.org/pub/firefox/releases/$(win_FirefoxRevision)/win64/en-US/Firefox%20Setup%20$(win_FirefoxRevision).exe
+ https://github.com/mozilla/geckodriver/releases/download/v$(win_GeckoDriverRevision)/geckodriver-v$(win_GeckoDriverRevision)-win64.zip
+ firefox.exe
+ geckodriver.exe
+ $([MSBuild]::NormalizePath($(FirefoxDir), '.install-firefox-$(win_FirefoxRevision).stamp'))
+ $([MSBuild]::NormalizePath($(GeckoDriverDir), '.install-geckodriver-$(win_GeckoDriverRevision).stamp'))
$([MSBuild]::NormalizePath($(FirefoxDir), $(FirefoxDirName), $(FirefoxBinaryName)))
$([MSBuild]::NormalizePath($(GeckoDriverDir), $(GeckoDriverDirName), $(GeckoDriverBinaryName)))
@@ -183,7 +195,7 @@ export __SCRIPT_DIR=%24( cd -- "%24( dirname -- "%24{BASH_SOURCE[0]}" )" &>
+ Condition="!Exists($(FirefoxStampFile)) and '$(InstallFirefoxForTests)' == 'true'">
<_StampFile Include="$(_BrowserStampDir).install-firefox*.stamp" />
@@ -197,24 +209,22 @@ export __SCRIPT_DIR=%24( cd -- "%24( dirname -- "%24{BASH_SOURCE[0]}" )" &>
-
-
-
-
- <_FirefoxBinaryPath>$([MSBuild]::NormalizePath($(FirefoxDir), $(FirefoxBinaryName)))
-
+
+
+
+
-
+
-
+
+ Condition="!Exists($(GeckoDriverStampFile)) and '$(InstallFirefoxForTests)' == 'true'">
<_StampFile Include="$(_BrowserStampDir).install-geckodriver*.stamp" />
@@ -228,18 +238,15 @@ export __SCRIPT_DIR=%24( cd -- "%24( dirname -- "%24{BASH_SOURCE[0]}" )" &>
-
-
-
-
-
- <_GeckoDriverBinaryPath>$([MSBuild]::NormalizePath($(GeckoDriverDir), $(GeckoDriverBinaryName)))
-
+
+
+
+
-
+
-
+
diff --git a/global.json b/global.json
index 4348e778e29b42..a62f147f01c51e 100644
--- a/global.json
+++ b/global.json
@@ -8,11 +8,11 @@
"dotnet": "9.0.100-preview.3.24204.13"
},
"msbuild-sdks": {
- "Microsoft.DotNet.Arcade.Sdk": "9.0.0-beta.24217.1",
- "Microsoft.DotNet.Helix.Sdk": "9.0.0-beta.24217.1",
- "Microsoft.DotNet.SharedFramework.Sdk": "9.0.0-beta.24217.1",
+ "Microsoft.DotNet.Arcade.Sdk": "9.0.0-beta.24253.1",
+ "Microsoft.DotNet.Helix.Sdk": "9.0.0-beta.24253.1",
+ "Microsoft.DotNet.SharedFramework.Sdk": "9.0.0-beta.24253.1",
"Microsoft.Build.NoTargets": "3.7.0",
"Microsoft.Build.Traversal": "3.4.0",
- "Microsoft.NET.Sdk.IL": "9.0.0-preview.4.24215.1"
+ "Microsoft.NET.Sdk.IL": "9.0.0-preview.4.24229.1"
}
}
diff --git a/src/coreclr/CMakeLists.txt b/src/coreclr/CMakeLists.txt
index aaf4005aa7394b..7ed0d509212cc4 100644
--- a/src/coreclr/CMakeLists.txt
+++ b/src/coreclr/CMakeLists.txt
@@ -206,6 +206,7 @@ if(CLR_CMAKE_HOST_UNIX)
# warnings and errors to be suppressed.
# Suppress these warnings here to avoid breaking the build.
add_compile_options($<$:-Wno-null-arithmetic>)
+ add_compile_options($<$:-Wno-sync-alignment>)
add_compile_options($<$:-Wno-conversion-null>)
add_compile_options($<$:-Wno-pointer-arith>)
diff --git a/src/coreclr/System.Private.CoreLib/src/System/Array.CoreCLR.cs b/src/coreclr/System.Private.CoreLib/src/System/Array.CoreCLR.cs
index de7b3021c458fe..74e07398481681 100644
--- a/src/coreclr/System.Private.CoreLib/src/System/Array.CoreCLR.cs
+++ b/src/coreclr/System.Private.CoreLib/src/System/Array.CoreCLR.cs
@@ -694,7 +694,7 @@ public ArrayInitializeCache(RuntimeType arrayType)
// it for type and executes it.
//
// The "T" will reflect the interface used to invoke the method. The actual runtime "this" will be
- // array that is castable to "T[]" (i.e. for primitivs and valuetypes, it will be exactly
+ // array that is castable to "T[]" (i.e. for primitives and valuetypes, it will be exactly
// "T[]" - for orefs, it may be a "U[]" where U derives from T.)
//----------------------------------------------------------------------------------------
internal sealed class SZArrayHelper
diff --git a/src/coreclr/System.Private.CoreLib/src/System/Reflection/MdImport.cs b/src/coreclr/System.Private.CoreLib/src/System/Reflection/MdImport.cs
index d1944cbbf77ceb..6f74dde91fe7e0 100644
--- a/src/coreclr/System.Private.CoreLib/src/System/Reflection/MdImport.cs
+++ b/src/coreclr/System.Private.CoreLib/src/System/Reflection/MdImport.cs
@@ -171,39 +171,42 @@ public static bool IsTokenOfType(int token, params MetadataTokenType[] types)
public override string ToString() => string.Create(CultureInfo.InvariantCulture, stackalloc char[64], $"0x{Value:x8}");
}
- internal unsafe struct MetadataEnumResult
+ internal ref struct MetadataEnumResult
{
- // Keep the definition in sync with vm\ManagedMdImport.hpp
- private int[] largeResult;
- private int length;
- private fixed int smallResult[16];
+ internal int _length;
+
+ internal const int SmallIntArrayLength = 16;
+
+ [InlineArray(SmallIntArrayLength)]
+ internal struct SmallIntArray
+ {
+ public int e;
+ }
+ internal SmallIntArray _smallResult;
+ internal int[]? _largeResult;
- public int Length => length;
+ public int Length => _length;
public int this[int index]
{
get
{
Debug.Assert(0 <= index && index < Length);
- if (largeResult != null)
- return largeResult[index];
+ if (_largeResult != null)
+ return _largeResult[index];
- fixed (int* p = smallResult)
- return p[index];
+ return _smallResult[index];
}
}
}
#pragma warning disable CA1066 // IEquatable interface implementation isn't used
- internal readonly struct MetadataImport
+ internal readonly partial struct MetadataImport
#pragma warning restore CA1067
{
private readonly IntPtr m_metadataImport2;
- private readonly object? m_keepalive;
#region Override methods from Object
- internal static readonly MetadataImport EmptyImport = new MetadataImport((IntPtr)0, null);
-
public override int GetHashCode()
{
return HashCode.Combine(m_metadataImport2);
@@ -225,47 +228,104 @@ private bool Equals(MetadataImport import)
#region Static Members
[MethodImpl(MethodImplOptions.InternalCall)]
- private static extern void _GetMarshalAs(IntPtr pNativeType, int cNativeType, out int unmanagedType, out int safeArraySubType, out string? safeArrayUserDefinedSubType,
- out int arraySubType, out int sizeParamIndex, out int sizeConst, out string? marshalType, out string? marshalCookie,
+ private static extern unsafe bool GetMarshalAs(
+ IntPtr pNativeType,
+ int cNativeType,
+ out int unmanagedType,
+ out int safeArraySubType,
+ out byte* safeArrayUserDefinedSubType,
+ out int arraySubType,
+ out int sizeParamIndex,
+ out int sizeConst,
+ out byte* marshalType,
+ out byte* marshalCookie,
out int iidParamIndex);
- internal static void GetMarshalAs(ConstArray nativeType,
- out UnmanagedType unmanagedType, out VarEnum safeArraySubType, out string? safeArrayUserDefinedSubType,
- out UnmanagedType arraySubType, out int sizeParamIndex, out int sizeConst, out string? marshalType, out string? marshalCookie,
- out int iidParamIndex)
- {
- _GetMarshalAs(nativeType.Signature, (int)nativeType.Length,
- out int _unmanagedType, out int _safeArraySubType, out safeArrayUserDefinedSubType,
- out int _arraySubType, out sizeParamIndex, out sizeConst, out marshalType, out marshalCookie,
- out iidParamIndex);
- unmanagedType = (UnmanagedType)_unmanagedType;
- safeArraySubType = (VarEnum)_safeArraySubType;
- arraySubType = (UnmanagedType)_arraySubType;
- }
- #endregion
+ internal static unsafe MarshalAsAttribute GetMarshalAs(ConstArray nativeType, RuntimeModule scope)
+ {
+ if (!GetMarshalAs(
+ nativeType.Signature,
+ nativeType.Length,
+ out int unmanagedTypeRaw,
+ out int safeArraySubTypeRaw,
+ out byte* safeArrayUserDefinedSubTypeRaw,
+ out int arraySubTypeRaw,
+ out int sizeParamIndex,
+ out int sizeConst,
+ out byte* marshalTypeRaw,
+ out byte* marshalCookieRaw,
+ out int iidParamIndex))
+ {
+ throw new BadImageFormatException();
+ }
- #region Internal Static Members
- internal static void ThrowError(int hResult)
- {
- throw new MetadataException(hResult);
+ string? safeArrayUserDefinedTypeName = safeArrayUserDefinedSubTypeRaw == null
+ ? null
+ : Text.Encoding.UTF8.GetString(MemoryMarshal.CreateReadOnlySpanFromNullTerminated(safeArrayUserDefinedSubTypeRaw));
+ string? marshalTypeName = marshalTypeRaw == null
+ ? null
+ : Text.Encoding.UTF8.GetString(MemoryMarshal.CreateReadOnlySpanFromNullTerminated(marshalTypeRaw));
+ string? marshalCookie = marshalCookieRaw == null
+ ? null
+ : Text.Encoding.UTF8.GetString(MemoryMarshal.CreateReadOnlySpanFromNullTerminated(marshalCookieRaw));
+
+ RuntimeType? safeArrayUserDefinedType = string.IsNullOrEmpty(safeArrayUserDefinedTypeName) ? null :
+ TypeNameParser.GetTypeReferencedByCustomAttribute(safeArrayUserDefinedTypeName, scope);
+ RuntimeType? marshalTypeRef = null;
+
+ try
+ {
+ marshalTypeRef = marshalTypeName is null ? null : TypeNameParser.GetTypeReferencedByCustomAttribute(marshalTypeName, scope);
+ }
+ catch (TypeLoadException)
+ {
+ // The user may have supplied a bad type name string causing this TypeLoadException
+ // Regardless, we return the bad type name
+ Debug.Assert(marshalTypeName is not null);
+ }
+
+ MarshalAsAttribute attribute = new MarshalAsAttribute((UnmanagedType)unmanagedTypeRaw);
+
+ attribute.SafeArraySubType = (VarEnum)safeArraySubTypeRaw;
+ attribute.SafeArrayUserDefinedSubType = safeArrayUserDefinedType;
+ attribute.IidParameterIndex = iidParamIndex;
+ attribute.ArraySubType = (UnmanagedType)arraySubTypeRaw;
+ attribute.SizeParamIndex = (short)sizeParamIndex;
+ attribute.SizeConst = sizeConst;
+ attribute.MarshalType = marshalTypeName;
+ attribute.MarshalTypeRef = marshalTypeRef;
+ attribute.MarshalCookie = marshalCookie;
+
+ return attribute;
}
#endregion
#region Constructor
- internal MetadataImport(IntPtr metadataImport2, object? keepalive)
+ [MethodImpl(MethodImplOptions.InternalCall)]
+ private static extern unsafe IntPtr GetMetadataImport(RuntimeModule module);
+
+ internal MetadataImport(RuntimeModule module)
{
- m_metadataImport2 = metadataImport2;
- m_keepalive = keepalive;
+ ArgumentNullException.ThrowIfNull(module);
+
+ // The MetadataImport instance needs to be acquired in this manner
+ // since the instance can be replaced during HotReload and EnC scenarios.
+ m_metadataImport2 = GetMetadataImport(module);
}
#endregion
- #region FCalls
- [MethodImpl(MethodImplOptions.InternalCall)]
- private static extern void _Enum(IntPtr scope, int type, int parent, out MetadataEnumResult result);
+ [LibraryImport(RuntimeHelpers.QCall, EntryPoint = "MetadataImport_Enum")]
+ private static unsafe partial void Enum(IntPtr scope, int type, int parent, ref int length, int* shortResult, ObjectHandleOnStack longResult);
- public void Enum(MetadataTokenType type, int parent, out MetadataEnumResult result)
+ public unsafe void Enum(MetadataTokenType type, int parent, out MetadataEnumResult result)
{
- _Enum(m_metadataImport2, (int)type, parent, out result);
+ result = default;
+ int length = MetadataEnumResult.SmallIntArrayLength;
+ fixed (int* p = &result._smallResult.e)
+ {
+ Enum(m_metadataImport2, (int)type, parent, ref length, p, ObjectHandleOnStack.Create(ref result._largeResult));
+ }
+ result._length = length;
}
public void EnumNestedTypes(int mdTypeDef, out MetadataEnumResult result)
@@ -298,117 +358,131 @@ public void EnumEvents(int mdTypeDef, out MetadataEnumResult result)
Enum(MetadataTokenType.Event, mdTypeDef, out result);
}
+ private static unsafe string? ConvertMetadataStringPermitInvalidContent(char* stringMetadataEncoding, int length)
+ {
+ Debug.Assert(stringMetadataEncoding != null);
+ // Metadata encoding is always UTF-16LE, but user strings can be leveraged to encode invalid surrogates.
+ // This means we rely on the string's constructor rather than the stricter Encoding.Unicode API.
+ return new string(stringMetadataEncoding, 0, length);
+ }
+
+ #region FCalls
[MethodImpl(MethodImplOptions.InternalCall)]
- private static extern string? _GetDefaultValue(IntPtr scope, int mdToken, out long value, out int length, out int corElementType);
- public string? GetDefaultValue(int mdToken, out long value, out int length, out CorElementType corElementType)
+ private static extern unsafe int GetDefaultValue(
+ IntPtr scope,
+ int mdToken,
+ out long value,
+ out char* stringMetadataEncoding,
+ out int length,
+ out int corElementType);
+
+ public unsafe string? GetDefaultValue(int mdToken, out long value, out int length, out CorElementType corElementType)
{
- string? stringVal = _GetDefaultValue(m_metadataImport2, mdToken, out value, out length, out int _corElementType);
- corElementType = (CorElementType)_corElementType;
- return stringVal;
+ ThrowBadImageExceptionForHR(GetDefaultValue(m_metadataImport2, mdToken, out value, out char* stringMetadataEncoding, out length, out int corElementTypeRaw));
+
+ corElementType = (CorElementType)corElementTypeRaw;
+
+ if (corElementType is CorElementType.ELEMENT_TYPE_STRING
+ && stringMetadataEncoding != null)
+ {
+ return ConvertMetadataStringPermitInvalidContent(stringMetadataEncoding, length);
+ }
+
+ return null;
}
[MethodImpl(MethodImplOptions.InternalCall)]
- private static extern unsafe void _GetUserString(IntPtr scope, int mdToken, void** name, out int length);
+ private static extern unsafe int GetUserString(IntPtr scope, int mdToken, out char* stringMetadataEncoding, out int length);
+
public unsafe string? GetUserString(int mdToken)
{
- void* name;
- _GetUserString(m_metadataImport2, mdToken, &name, out int length);
+ ThrowBadImageExceptionForHR(GetUserString(m_metadataImport2, mdToken, out char* stringMetadataEncoding, out int length));
- return name != null ?
- new string((char*)name, 0, length) :
+ return stringMetadataEncoding != null ?
+ ConvertMetadataStringPermitInvalidContent(stringMetadataEncoding, length) :
null;
}
[MethodImpl(MethodImplOptions.InternalCall)]
- private static extern unsafe void _GetName(IntPtr scope, int mdToken, void** name);
+ private static extern unsafe int GetName(IntPtr scope, int mdToken, out byte* name);
+
public unsafe MdUtf8String GetName(int mdToken)
{
- void* name;
- _GetName(m_metadataImport2, mdToken, &name);
-
+ ThrowBadImageExceptionForHR(GetName(m_metadataImport2, mdToken, out byte* name));
return new MdUtf8String(name);
}
[MethodImpl(MethodImplOptions.InternalCall)]
- private static extern unsafe void _GetNamespace(IntPtr scope, int mdToken, void** namesp);
+ private static extern unsafe int GetNamespace(IntPtr scope, int mdToken, out byte* namesp);
+
public unsafe MdUtf8String GetNamespace(int mdToken)
{
- void* namesp;
- _GetNamespace(m_metadataImport2, mdToken, &namesp);
-
+ ThrowBadImageExceptionForHR(GetNamespace(m_metadataImport2, mdToken, out byte* namesp));
return new MdUtf8String(namesp);
}
[MethodImpl(MethodImplOptions.InternalCall)]
- private static extern unsafe void _GetEventProps(IntPtr scope, int mdToken, void** name, out int eventAttributes);
+ private static extern unsafe int GetEventProps(IntPtr scope, int mdToken, out void* name, out int eventAttributes);
+
public unsafe void GetEventProps(int mdToken, out void* name, out EventAttributes eventAttributes)
{
- void* _name;
- _GetEventProps(m_metadataImport2, mdToken, &_name, out int _eventAttributes);
- name = _name;
- eventAttributes = (EventAttributes)_eventAttributes;
+ ThrowBadImageExceptionForHR(GetEventProps(m_metadataImport2, mdToken, out name, out int eventAttributesRaw));
+ eventAttributes = (EventAttributes)eventAttributesRaw;
}
[MethodImpl(MethodImplOptions.InternalCall)]
- private static extern void _GetFieldDefProps(IntPtr scope, int mdToken, out int fieldAttributes);
+ private static extern int GetFieldDefProps(IntPtr scope, int mdToken, out int fieldAttributes);
+
public void GetFieldDefProps(int mdToken, out FieldAttributes fieldAttributes)
{
- _GetFieldDefProps(m_metadataImport2, mdToken, out int _fieldAttributes);
- fieldAttributes = (FieldAttributes)_fieldAttributes;
+ ThrowBadImageExceptionForHR(GetFieldDefProps(m_metadataImport2, mdToken, out int fieldAttributesRaw));
+ fieldAttributes = (FieldAttributes)fieldAttributesRaw;
}
[MethodImpl(MethodImplOptions.InternalCall)]
- private static extern unsafe void _GetPropertyProps(IntPtr scope,
- int mdToken, void** name, out int propertyAttributes, out ConstArray signature);
+ private static extern unsafe int GetPropertyProps(IntPtr scope, int mdToken, out void* name, out int propertyAttributes, out ConstArray signature);
+
public unsafe void GetPropertyProps(int mdToken, out void* name, out PropertyAttributes propertyAttributes, out ConstArray signature)
{
- void* _name;
- _GetPropertyProps(m_metadataImport2, mdToken, &_name, out int _propertyAttributes, out signature);
- name = _name;
- propertyAttributes = (PropertyAttributes)_propertyAttributes;
+ ThrowBadImageExceptionForHR(GetPropertyProps(m_metadataImport2, mdToken, out name, out int propertyAttributesRaw, out signature));
+ propertyAttributes = (PropertyAttributes)propertyAttributesRaw;
}
[MethodImpl(MethodImplOptions.InternalCall)]
- private static extern void _GetParentToken(IntPtr scope,
- int mdToken, out int tkParent);
+ private static extern int GetParentToken(IntPtr scope, int mdToken, out int tkParent);
+
public int GetParentToken(int tkToken)
{
- _GetParentToken(m_metadataImport2, tkToken, out int tkParent);
+ ThrowBadImageExceptionForHR(GetParentToken(m_metadataImport2, tkToken, out int tkParent));
return tkParent;
}
[MethodImpl(MethodImplOptions.InternalCall)]
- private static extern void _GetParamDefProps(IntPtr scope,
- int parameterToken, out int sequence, out int attributes);
+ private static extern int GetParamDefProps(IntPtr scope, int parameterToken, out int sequence, out int attributes);
+
public void GetParamDefProps(int parameterToken, out int sequence, out ParameterAttributes attributes)
{
-
- _GetParamDefProps(m_metadataImport2, parameterToken, out sequence, out int _attributes);
-
- attributes = (ParameterAttributes)_attributes;
+ ThrowBadImageExceptionForHR(GetParamDefProps(m_metadataImport2, parameterToken, out sequence, out int attributesRaw));
+ attributes = (ParameterAttributes)attributesRaw;
}
[MethodImpl(MethodImplOptions.InternalCall)]
- private static extern void _GetGenericParamProps(IntPtr scope,
- int genericParameter,
- out int flags);
+ private static extern int GetGenericParamProps(IntPtr scope, int genericParameter, out int flags);
public void GetGenericParamProps(
int genericParameter,
out GenericParameterAttributes attributes)
{
- _GetGenericParamProps(m_metadataImport2, genericParameter, out int _attributes);
- attributes = (GenericParameterAttributes)_attributes;
+ ThrowBadImageExceptionForHR(GetGenericParamProps(m_metadataImport2, genericParameter, out int attributesRaw));
+ attributes = (GenericParameterAttributes)attributesRaw;
}
[MethodImpl(MethodImplOptions.InternalCall)]
- private static extern void _GetScopeProps(IntPtr scope,
- out Guid mvid);
+ private static extern int GetScopeProps(IntPtr scope, out Guid mvid);
- public void GetScopeProps(
- out Guid mvid)
+ public void GetScopeProps(out Guid mvid)
{
- _GetScopeProps(m_metadataImport2, out mvid);
+ ThrowBadImageExceptionForHR(GetScopeProps(m_metadataImport2, out mvid));
}
public ConstArray GetMethodSignature(MetadataToken token)
@@ -420,47 +494,36 @@ public ConstArray GetMethodSignature(MetadataToken token)
}
[MethodImpl(MethodImplOptions.InternalCall)]
- private static extern void _GetSigOfMethodDef(IntPtr scope,
- int methodToken,
- ref ConstArray signature);
+ private static extern int GetSigOfMethodDef(IntPtr scope, int methodToken, ref ConstArray signature);
public ConstArray GetSigOfMethodDef(int methodToken)
{
ConstArray signature = default;
-
- _GetSigOfMethodDef(m_metadataImport2, methodToken, ref signature);
-
+ ThrowBadImageExceptionForHR(GetSigOfMethodDef(m_metadataImport2, methodToken, ref signature));
return signature;
}
[MethodImpl(MethodImplOptions.InternalCall)]
- private static extern void _GetSignatureFromToken(IntPtr scope,
- int methodToken,
- ref ConstArray signature);
+ private static extern int GetSignatureFromToken(IntPtr scope, int methodToken, ref ConstArray signature);
public ConstArray GetSignatureFromToken(int token)
{
ConstArray signature = default;
-
- _GetSignatureFromToken(m_metadataImport2, token, ref signature);
-
+ ThrowBadImageExceptionForHR(GetSignatureFromToken(m_metadataImport2, token, ref signature));
return signature;
}
[MethodImpl(MethodImplOptions.InternalCall)]
- private static extern void _GetMemberRefProps(IntPtr scope,
- int memberTokenRef,
- out ConstArray signature);
+ private static extern int GetMemberRefProps(IntPtr scope, int memberTokenRef, out ConstArray signature);
public ConstArray GetMemberRefProps(int memberTokenRef)
{
- _GetMemberRefProps(m_metadataImport2, memberTokenRef, out ConstArray signature);
-
+ ThrowBadImageExceptionForHR(GetMemberRefProps(m_metadataImport2, memberTokenRef, out ConstArray signature));
return signature;
}
[MethodImpl(MethodImplOptions.InternalCall)]
- private static extern void _GetCustomAttributeProps(IntPtr scope,
+ private static extern int GetCustomAttributeProps(IntPtr scope,
int customAttributeToken,
out int constructorToken,
out ConstArray signature);
@@ -470,66 +533,62 @@ public void GetCustomAttributeProps(
out int constructorToken,
out ConstArray signature)
{
- _GetCustomAttributeProps(m_metadataImport2, customAttributeToken,
- out constructorToken, out signature);
+ ThrowBadImageExceptionForHR(GetCustomAttributeProps(m_metadataImport2, customAttributeToken, out constructorToken, out signature));
}
[MethodImpl(MethodImplOptions.InternalCall)]
- private static extern void _GetClassLayout(IntPtr scope,
- int typeTokenDef, out int packSize, out int classSize);
+ private static extern int GetClassLayout(IntPtr scope, int typeTokenDef, out int packSize, out int classSize);
+
public void GetClassLayout(
int typeTokenDef,
out int packSize,
out int classSize)
{
- _GetClassLayout(m_metadataImport2, typeTokenDef, out packSize, out classSize);
+ ThrowBadImageExceptionForHR(GetClassLayout(m_metadataImport2, typeTokenDef, out packSize, out classSize));
}
[MethodImpl(MethodImplOptions.InternalCall)]
- private static extern bool _GetFieldOffset(IntPtr scope,
- int typeTokenDef, int fieldTokenDef, out int offset);
+ private static extern int GetFieldOffset(IntPtr scope, int typeTokenDef, int fieldTokenDef, out int offset, out bool found);
+
public bool GetFieldOffset(
int typeTokenDef,
int fieldTokenDef,
out int offset)
{
- return _GetFieldOffset(m_metadataImport2, typeTokenDef, fieldTokenDef, out offset);
+ int hr = GetFieldOffset(m_metadataImport2, typeTokenDef, fieldTokenDef, out offset, out bool found);
+ if (!found && hr < 0)
+ {
+ throw new BadImageFormatException();
+ }
+ return found;
}
[MethodImpl(MethodImplOptions.InternalCall)]
- private static extern void _GetSigOfFieldDef(IntPtr scope,
- int fieldToken,
- ref ConstArray fieldMarshal);
+ private static extern int GetSigOfFieldDef(IntPtr scope, int fieldToken, ref ConstArray fieldMarshal);
public ConstArray GetSigOfFieldDef(int fieldToken)
{
- ConstArray fieldMarshal = default;
-
- _GetSigOfFieldDef(m_metadataImport2, fieldToken, ref fieldMarshal);
-
- return fieldMarshal;
+ ConstArray sig = default;
+ ThrowBadImageExceptionForHR(GetSigOfFieldDef(m_metadataImport2, fieldToken, ref sig));
+ return sig;
}
[MethodImpl(MethodImplOptions.InternalCall)]
- private static extern void _GetFieldMarshal(IntPtr scope,
- int fieldToken,
- ref ConstArray fieldMarshal);
+ private static extern int GetFieldMarshal(IntPtr scope, int fieldToken, ref ConstArray fieldMarshal);
public ConstArray GetFieldMarshal(int fieldToken)
{
ConstArray fieldMarshal = default;
-
- _GetFieldMarshal(m_metadataImport2, fieldToken, ref fieldMarshal);
-
+ ThrowBadImageExceptionForHR(GetFieldMarshal(m_metadataImport2, fieldToken, ref fieldMarshal));
return fieldMarshal;
}
[MethodImpl(MethodImplOptions.InternalCall)]
- private static extern unsafe void _GetPInvokeMap(IntPtr scope,
+ private static extern unsafe int GetPInvokeMap(IntPtr scope,
int token,
out int attributes,
- void** importName,
- void** importDll);
+ out byte* importName,
+ out byte* importDll);
public unsafe void GetPInvokeMap(
int token,
@@ -537,28 +596,28 @@ public unsafe void GetPInvokeMap(
out string importName,
out string importDll)
{
- void* _importName, _importDll;
- _GetPInvokeMap(m_metadataImport2, token, out int _attributes, &_importName, &_importDll);
- importName = new MdUtf8String(_importName).ToString();
- importDll = new MdUtf8String(_importDll).ToString();
+ ThrowBadImageExceptionForHR(GetPInvokeMap(m_metadataImport2, token, out int attributesRaw, out byte* importNameRaw, out byte* importDllRaw));
- attributes = (PInvokeAttributes)_attributes;
+ importName = Text.Encoding.UTF8.GetString(MemoryMarshal.CreateReadOnlySpanFromNullTerminated(importNameRaw));
+ importDll = Text.Encoding.UTF8.GetString(MemoryMarshal.CreateReadOnlySpanFromNullTerminated(importDllRaw));
+ attributes = (PInvokeAttributes)attributesRaw;
}
[MethodImpl(MethodImplOptions.InternalCall)]
- private static extern bool _IsValidToken(IntPtr scope, int token);
+ private static extern bool IsValidToken(IntPtr scope, int token);
+
public bool IsValidToken(int token)
{
- return _IsValidToken(m_metadataImport2, token);
+ return IsValidToken(m_metadataImport2, token);
}
#endregion
- }
-
- internal sealed class MetadataException : Exception
- {
- private readonly int m_hr;
- internal MetadataException(int hr) { m_hr = hr; }
- public override string ToString() => $"{nameof(MetadataException)} HResult = {m_hr:x}.";
+ private static void ThrowBadImageExceptionForHR(int hr)
+ {
+ if (hr < 0)
+ {
+ throw new BadImageFormatException();
+ }
+ }
}
}
diff --git a/src/coreclr/System.Private.CoreLib/src/System/Reflection/RuntimeCustomAttributeData.cs b/src/coreclr/System.Private.CoreLib/src/System/Reflection/RuntimeCustomAttributeData.cs
index dd0b5cf897f4d0..38663e57cde246 100644
--- a/src/coreclr/System.Private.CoreLib/src/System/Reflection/RuntimeCustomAttributeData.cs
+++ b/src/coreclr/System.Private.CoreLib/src/System/Reflection/RuntimeCustomAttributeData.cs
@@ -215,6 +215,7 @@ internal static CustomAttributeRecord[] GetCustomAttributeRecords(RuntimeModule
scope.GetCustomAttributeProps(tkCustomAttributeTokens[i],
out records[i].tkCtor.Value, out records[i].blob);
}
+ GC.KeepAlive(module);
return records;
}
@@ -250,13 +251,13 @@ internal static CustomAttributeTypedArgument Filter(IList a
private RuntimeCustomAttributeData(RuntimeModule scope, MetadataToken caCtorToken, in ConstArray blob)
{
m_scope = scope;
- m_ctor = (RuntimeConstructorInfo)RuntimeType.GetMethodBase(scope, caCtorToken)!;
+ m_ctor = (RuntimeConstructorInfo)RuntimeType.GetMethodBase(m_scope, caCtorToken)!;
if (m_ctor!.DeclaringType!.IsGenericType)
{
- MetadataImport metadataScope = scope.MetadataImport;
- Type attributeType = scope.ResolveType(metadataScope.GetParentToken(caCtorToken), null, null)!;
- m_ctor = (RuntimeConstructorInfo)scope.ResolveMethod(caCtorToken, attributeType.GenericTypeArguments, null)!.MethodHandle.GetMethodInfo();
+ MetadataImport metadataScope = m_scope.MetadataImport;
+ Type attributeType = m_scope.ResolveType(metadataScope.GetParentToken(caCtorToken), null, null)!;
+ m_ctor = (RuntimeConstructorInfo)m_scope.ResolveMethod(caCtorToken, attributeType.GenericTypeArguments, null)!.MethodHandle.GetMethodInfo();
}
ReadOnlySpan parameters = m_ctor.GetParametersAsSpan();
@@ -1466,6 +1467,7 @@ private static bool IsCustomAttributeDefined(
}
}
}
+ GC.KeepAlive(decoratedModule);
return false;
}
@@ -1615,6 +1617,7 @@ private static void AddCustomAttributes(
attributes.Add(attribute);
}
+ GC.KeepAlive(decoratedModule);
}
[UnconditionalSuppressMessage("ReflectionAnalysis", "IL2026:RequiresUnreferencedCode",
@@ -2194,10 +2197,11 @@ internal static bool IsDefined(RuntimeFieldInfo field, RuntimeType? caType)
if ((method.Attributes & MethodAttributes.PinvokeImpl) == 0)
return null;
- MetadataImport scope = ModuleHandle.GetMetadataImport(method.Module.ModuleHandle.GetRuntimeModule());
+ RuntimeModule module = method.Module.ModuleHandle.GetRuntimeModule();
+ MetadataImport scope = module.MetadataImport;
int token = method.MetadataToken;
-
scope.GetPInvokeMap(token, out PInvokeAttributes flags, out string entryPoint, out string dllName);
+ GC.KeepAlive(module);
CharSet charSet = CharSet.None;
@@ -2252,51 +2256,25 @@ internal static bool IsDefined(RuntimeFieldInfo field, RuntimeType? caType)
private static MarshalAsAttribute? GetMarshalAsCustomAttribute(int token, RuntimeModule scope)
{
- ConstArray nativeType = ModuleHandle.GetMetadataImport(scope).GetFieldMarshal(token);
+ ConstArray nativeType = scope.MetadataImport.GetFieldMarshal(token);
if (nativeType.Length == 0)
return null;
- MetadataImport.GetMarshalAs(nativeType,
- out UnmanagedType unmanagedType, out VarEnum safeArraySubType, out string? safeArrayUserDefinedTypeName, out UnmanagedType arraySubType, out int sizeParamIndex,
- out int sizeConst, out string? marshalTypeName, out string? marshalCookie, out int iidParamIndex);
-
- RuntimeType? safeArrayUserDefinedType = string.IsNullOrEmpty(safeArrayUserDefinedTypeName) ? null :
- TypeNameParser.GetTypeReferencedByCustomAttribute(safeArrayUserDefinedTypeName, scope);
- RuntimeType? marshalTypeRef = null;
-
- try
- {
- marshalTypeRef = marshalTypeName is null ? null : TypeNameParser.GetTypeReferencedByCustomAttribute(marshalTypeName, scope);
- }
- catch (TypeLoadException)
- {
- // The user may have supplied a bad type name string causing this TypeLoadException
- // Regardless, we return the bad type name
- Debug.Assert(marshalTypeName is not null);
- }
-
- MarshalAsAttribute attribute = new MarshalAsAttribute(unmanagedType);
-
- attribute.SafeArraySubType = safeArraySubType;
- attribute.SafeArrayUserDefinedSubType = safeArrayUserDefinedType;
- attribute.IidParameterIndex = iidParamIndex;
- attribute.ArraySubType = arraySubType;
- attribute.SizeParamIndex = (short)sizeParamIndex;
- attribute.SizeConst = sizeConst;
- attribute.MarshalType = marshalTypeName;
- attribute.MarshalTypeRef = marshalTypeRef;
- attribute.MarshalCookie = marshalCookie;
-
- return attribute;
+ return MetadataImport.GetMarshalAs(nativeType, scope);
}
private static FieldOffsetAttribute? GetFieldOffsetCustomAttribute(RuntimeFieldInfo field)
{
- if (field.DeclaringType is not null &&
- field.GetRuntimeModule().MetadataImport.GetFieldOffset(field.DeclaringType.MetadataToken, field.MetadataToken, out int fieldOffset))
- return new FieldOffsetAttribute(fieldOffset);
-
+ if (field.DeclaringType is not null)
+ {
+ RuntimeModule module = field.GetRuntimeModule();
+ if (module.MetadataImport.GetFieldOffset(field.DeclaringType.MetadataToken, field.MetadataToken, out int fieldOffset))
+ {
+ return new FieldOffsetAttribute(fieldOffset);
+ }
+ GC.KeepAlive(module);
+ }
return null;
}
@@ -2322,7 +2300,9 @@ internal static bool IsDefined(RuntimeFieldInfo field, RuntimeType? caType)
case TypeAttributes.UnicodeClass: charSet = CharSet.Unicode; break;
default: Debug.Fail("Unreachable code"); break;
}
- type.GetRuntimeModule().MetadataImport.GetClassLayout(type.MetadataToken, out int pack, out int size);
+ RuntimeModule module = type.GetRuntimeModule();
+ module.MetadataImport.GetClassLayout(type.MetadataToken, out int pack, out int size);
+ GC.KeepAlive(module);
StructLayoutAttribute attribute = new StructLayoutAttribute(layoutKind);
diff --git a/src/coreclr/System.Private.CoreLib/src/System/Reflection/RuntimeModule.cs b/src/coreclr/System.Private.CoreLib/src/System/Reflection/RuntimeModule.cs
index 8e7e4a05c73dde..4e9a4dffeb209a 100644
--- a/src/coreclr/System.Private.CoreLib/src/System/Reflection/RuntimeModule.cs
+++ b/src/coreclr/System.Private.CoreLib/src/System/Reflection/RuntimeModule.cs
@@ -202,7 +202,7 @@ public override byte[] ResolveSignature(int metadataToken)
if (declaringType.IsGenericType || declaringType.IsArray)
{
- int tkDeclaringType = ModuleHandle.GetMetadataImport(this).GetParentToken(metadataToken);
+ int tkDeclaringType = MetadataImport.GetParentToken(metadataToken);
declaringType = (RuntimeType)ResolveType(tkDeclaringType, genericTypeArguments, genericMethodArguments);
}
@@ -353,7 +353,7 @@ public override void GetPEKind(out PortableExecutableKinds peKind, out ImageFile
#region Internal Members
internal RuntimeType RuntimeType => m_runtimeType ??= ModuleHandle.GetModuleType(this);
- internal MetadataImport MetadataImport => ModuleHandle.GetMetadataImport(this);
+ internal MetadataImport MetadataImport => new MetadataImport(this);
#endregion
#region ICustomAttributeProvider Members
diff --git a/src/coreclr/System.Private.CoreLib/src/System/Reflection/RuntimeParameterInfo.cs b/src/coreclr/System.Private.CoreLib/src/System/Reflection/RuntimeParameterInfo.cs
index 5af77b790f49c3..24dd89c2113172 100644
--- a/src/coreclr/System.Private.CoreLib/src/System/Reflection/RuntimeParameterInfo.cs
+++ b/src/coreclr/System.Private.CoreLib/src/System/Reflection/RuntimeParameterInfo.cs
@@ -29,6 +29,11 @@ internal static ParameterInfo GetReturnParameter(IRuntimeMethodInfo method, Memb
private static ParameterInfo[] GetParameters(
IRuntimeMethodInfo methodHandle, MemberInfo member, Signature sig, out ParameterInfo? returnParameter, bool fetchReturnParameter)
{
+ // The lifetime rules for MetadataImport expect these two objects to be the same instance.
+ // See the lifetime of MetadataImport, acquired through IRuntimeMethodInfo, but extended
+ // through the MemberInfo instance.
+ Debug.Assert(ReferenceEquals(methodHandle, member));
+
returnParameter = null;
int sigArgCount = sig.Arguments.Length;
ParameterInfo[] args =
@@ -43,7 +48,7 @@ private static ParameterInfo[] GetParameters(
// are generated on the fly by the runtime.
if (!MdToken.IsNullToken(tkMethodDef))
{
- MetadataImport scope = RuntimeTypeHandle.GetMetadataImport(RuntimeMethodHandle.GetDeclaringType(methodHandle));
+ MetadataImport scope = RuntimeMethodHandle.GetDeclaringType(methodHandle).GetRuntimeModule().MetadataImport;
scope.EnumParams(tkMethodDef, out MetadataEnumResult tkParamDefs);
@@ -73,7 +78,7 @@ private static ParameterInfo[] GetParameters(
}
else if (!fetchReturnParameter && position >= 0)
{
- // position beyong sigArgCount?
+ // position beyond sigArgCount?
if (position >= sigArgCount)
throw new BadImageFormatException(SR.BadImageFormat_ParameterSignatureMismatch);
@@ -86,7 +91,7 @@ private static ParameterInfo[] GetParameters(
// Fill in empty ParameterInfos for those without tokens
if (fetchReturnParameter)
{
- returnParameter ??= new RuntimeParameterInfo(sig, MetadataImport.EmptyImport, 0, -1, (ParameterAttributes)0, member);
+ returnParameter ??= new RuntimeParameterInfo(sig, default, 0, -1, (ParameterAttributes)0, member);
}
else
{
@@ -97,7 +102,7 @@ private static ParameterInfo[] GetParameters(
if (args[i] != null)
continue;
- args[i] = new RuntimeParameterInfo(sig, MetadataImport.EmptyImport, 0, i, (ParameterAttributes)0, member);
+ args[i] = new RuntimeParameterInfo(sig, default, 0, i, (ParameterAttributes)0, member);
}
}
}
@@ -165,7 +170,7 @@ private RuntimeParameterInfo(RuntimeParameterInfo accessor, MemberInfo member)
PositionImpl = accessor.Position;
AttrsImpl = accessor.Attributes;
- // Strictly speeking, property's don't contain parameter tokens
+ // Strictly speaking, properties don't contain parameter tokens
// However we need this to make ca's work... oh well...
m_tkParamDef = MdToken.IsNullToken(accessor.MetadataToken) ? (int)MetadataTokenType.ParamDef : accessor.MetadataToken;
m_scope = accessor.m_scope;
@@ -176,7 +181,7 @@ private RuntimeParameterInfo(
int position, ParameterAttributes attributes, MemberInfo member)
{
Debug.Assert(member != null);
- Debug.Assert(MdToken.IsNullToken(tkParamDef) == scope.Equals(MetadataImport.EmptyImport));
+ Debug.Assert(MdToken.IsNullToken(tkParamDef) == scope.Equals((MetadataImport)default));
Debug.Assert(MdToken.IsNullToken(tkParamDef) || MdToken.IsTokenOfType(tkParamDef, MetadataTokenType.ParamDef));
PositionImpl = position;
@@ -201,7 +206,7 @@ internal RuntimeParameterInfo(MethodInfo owner, string? name, Type parameterType
PositionImpl = position;
AttrsImpl = ParameterAttributes.None;
m_tkParamDef = (int)MetadataTokenType.ParamDef;
- m_scope = MetadataImport.EmptyImport;
+ m_scope = default;
}
#endregion
@@ -239,6 +244,7 @@ public override string? Name
if (!MdToken.IsNullToken(m_tkParamDef))
{
string name = m_scope.GetName(m_tkParamDef).ToString();
+ GC.KeepAlive(this);
NameImpl = name;
}
@@ -339,6 +345,7 @@ private bool TryGetDefaultValueInternal(bool raw, out object? defaultValue)
#region Look for a default value in metadata
// This will return DBNull.Value if no constant value is defined on m_tkParamDef in the metadata.
defaultValue = MdConstant.GetValue(m_scope, m_tkParamDef, ParameterType.TypeHandle, raw);
+ GC.KeepAlive(this);
// If default value is not specified in metadata, look for it in custom attributes
if (defaultValue == DBNull.Value)
diff --git a/src/coreclr/System.Private.CoreLib/src/System/Reflection/RuntimePropertyInfo.cs b/src/coreclr/System.Private.CoreLib/src/System/Reflection/RuntimePropertyInfo.cs
index 933e05d0bf7854..d49ac821e684d9 100644
--- a/src/coreclr/System.Private.CoreLib/src/System/Reflection/RuntimePropertyInfo.cs
+++ b/src/coreclr/System.Private.CoreLib/src/System/Reflection/RuntimePropertyInfo.cs
@@ -35,7 +35,8 @@ internal RuntimePropertyInfo(
Debug.Assert(reflectedTypeCache != null);
Debug.Assert(!reflectedTypeCache.IsGlobal);
- MetadataImport scope = declaredType.GetRuntimeModule().MetadataImport;
+ RuntimeModule module = declaredType.GetRuntimeModule();
+ MetadataImport scope = module.MetadataImport;
m_token = tkProperty;
m_reflectedTypeCache = reflectedTypeCache;
@@ -47,6 +48,7 @@ internal RuntimePropertyInfo(
out _, out _, out _,
out m_getterMethod, out m_setterMethod, out m_otherMethod,
out isPrivate, out m_bindingFlags);
+ GC.KeepAlive(module);
}
#endregion
@@ -65,9 +67,9 @@ internal Signature Signature
{
if (m_signature == null)
{
-
GetRuntimeModule().MetadataImport.GetPropertyProps(
m_token, out _, out _, out ConstArray sig);
+ GC.KeepAlive(this);
m_signature = new Signature(sig.Signature.ToPointer(), sig.Length, m_declaringType);
}
@@ -210,6 +212,7 @@ public override Type[] GetOptionalCustomModifiers()
internal object GetConstantValue(bool raw)
{
object? defaultValue = MdConstant.GetValue(GetRuntimeModule().MetadataImport, m_token, PropertyType.TypeHandle, raw);
+ GC.KeepAlive(this);
if (defaultValue == DBNull.Value)
// Arg_EnumLitValueNotFound -> "Literal value was not found."
diff --git a/src/coreclr/System.Private.CoreLib/src/System/Reflection/TypeNameParser.CoreCLR.cs b/src/coreclr/System.Private.CoreLib/src/System/Reflection/TypeNameParser.CoreCLR.cs
index 2af4bb792d4588..35fa0b5718e387 100644
--- a/src/coreclr/System.Private.CoreLib/src/System/Reflection/TypeNameParser.CoreCLR.cs
+++ b/src/coreclr/System.Private.CoreLib/src/System/Reflection/TypeNameParser.CoreCLR.cs
@@ -4,11 +4,9 @@
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.IO;
-using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Loader;
-using System.Text;
using System.Threading;
namespace System.Reflection
@@ -57,7 +55,13 @@ internal partial struct TypeNameParser
return null;
}
- return new TypeNameParser(typeName)
+ Metadata.TypeName? parsed = Metadata.TypeNameParser.Parse(typeName, throwOnError: throwOnError);
+ if (parsed is null)
+ {
+ return null;
+ }
+
+ return new TypeNameParser()
{
_assemblyResolver = assemblyResolver,
_typeResolver = typeResolver,
@@ -65,7 +69,7 @@ internal partial struct TypeNameParser
_ignoreCase = ignoreCase,
_extensibleParser = extensibleParser,
_requestingAssembly = requestingAssembly
- }.Parse();
+ }.Resolve(parsed);
}
[RequiresUnreferencedCode("The type might be removed")]
@@ -75,13 +79,24 @@ internal partial struct TypeNameParser
bool ignoreCase,
Assembly topLevelAssembly)
{
- return new TypeNameParser(typeName)
+ Metadata.TypeName? parsed = Metadata.TypeNameParser.Parse(typeName, throwOnError);
+
+ if (parsed is null)
+ {
+ return null;
+ }
+ else if (topLevelAssembly is not null && parsed.AssemblyName is not null)
+ {
+ return throwOnError ? throw new ArgumentException(SR.Argument_AssemblyGetTypeCannotSpecifyAssembly) : null;
+ }
+
+ return new TypeNameParser()
{
_throwOnError = throwOnError,
_ignoreCase = ignoreCase,
_topLevelAssembly = topLevelAssembly,
_requestingAssembly = topLevelAssembly
- }.Parse();
+ }.Resolve(parsed);
}
// Resolve type name referenced by a custom attribute metadata.
@@ -95,12 +110,13 @@ internal static RuntimeType GetTypeReferencedByCustomAttribute(string typeName,
RuntimeAssembly requestingAssembly = scope.GetRuntimeAssembly();
- RuntimeType? type = (RuntimeType?)new TypeNameParser(typeName)
+ Metadata.TypeName parsed = Metadata.TypeName.Parse(typeName);
+ RuntimeType? type = (RuntimeType?)new TypeNameParser()
{
_throwOnError = true,
_suppressContextualReflectionContext = true,
_requestingAssembly = requestingAssembly
- }.Parse();
+ }.Resolve(parsed);
Debug.Assert(type != null);
@@ -124,13 +140,19 @@ internal static RuntimeType GetTypeReferencedByCustomAttribute(string typeName,
return null;
}
- RuntimeType? type = (RuntimeType?)new TypeNameParser(typeName)
+ Metadata.TypeName? parsed = Metadata.TypeNameParser.Parse(typeName, throwOnError);
+ if (parsed is null)
+ {
+ return null;
+ }
+
+ RuntimeType? type = (RuntimeType?)new TypeNameParser()
{
_requestingAssembly = requestingAssembly,
_throwOnError = throwOnError,
_suppressContextualReflectionContext = true,
_requireAssemblyQualifiedName = requireAssemblyQualifiedName,
- }.Parse();
+ }.Resolve(parsed);
if (type != null)
RuntimeTypeHandle.RegisterCollectibleTypeDependency(type, requestingAssembly);
@@ -138,23 +160,12 @@ internal static RuntimeType GetTypeReferencedByCustomAttribute(string typeName,
return type;
}
- private bool CheckTopLevelAssemblyQualifiedName()
- {
- if (_topLevelAssembly is not null)
- {
- if (_throwOnError)
- throw new ArgumentException(SR.Argument_AssemblyGetTypeCannotSpecifyAssembly);
- return false;
- }
- return true;
- }
-
- private Assembly? ResolveAssembly(string assemblyName)
+ private Assembly? ResolveAssembly(AssemblyName assemblyName)
{
Assembly? assembly;
if (_assemblyResolver is not null)
{
- assembly = _assemblyResolver(new AssemblyName(assemblyName));
+ assembly = _assemblyResolver(assemblyName);
if (assembly is null && _throwOnError)
{
throw new FileNotFoundException(SR.Format(SR.FileNotFound_ResolveAssembly, assemblyName));
@@ -162,7 +173,7 @@ private bool CheckTopLevelAssemblyQualifiedName()
}
else
{
- assembly = RuntimeAssembly.InternalLoad(new AssemblyName(assemblyName), ref Unsafe.NullRef(),
+ assembly = RuntimeAssembly.InternalLoad(assemblyName, ref Unsafe.NullRef(),
_suppressContextualReflectionContext ? null : AssemblyLoadContext.CurrentContextualReflectionContext,
requestingAssembly: (RuntimeAssembly?)_requestingAssembly, throwOnFileNotFound: _throwOnError);
}
@@ -173,13 +184,14 @@ private bool CheckTopLevelAssemblyQualifiedName()
Justification = "TypeNameParser.GetType is marked as RequiresUnreferencedCode.")]
[UnconditionalSuppressMessage("ReflectionAnalysis", "IL2075:UnrecognizedReflectionPattern",
Justification = "TypeNameParser.GetType is marked as RequiresUnreferencedCode.")]
- private Type? GetType(string typeName, ReadOnlySpan nestedTypeNames, string? assemblyNameIfAny)
+ private Type? GetType(string escapedTypeName, // For nested types, it's Name. For other types it's FullName
+ ReadOnlySpan nestedTypeNames, Metadata.TypeName parsedName)
{
Assembly? assembly;
- if (assemblyNameIfAny is not null)
+ if (parsedName.AssemblyName is not null)
{
- assembly = ResolveAssembly(assemblyNameIfAny);
+ assembly = ResolveAssembly(parsedName.AssemblyName.ToAssemblyName());
if (assembly is null)
return null;
}
@@ -193,8 +205,6 @@ private bool CheckTopLevelAssemblyQualifiedName()
// Resolve the top level type.
if (_typeResolver is not null)
{
- string escapedTypeName = EscapeTypeName(typeName);
-
type = _typeResolver(assembly, escapedTypeName, _ignoreCase);
if (type is null)
@@ -216,28 +226,29 @@ private bool CheckTopLevelAssemblyQualifiedName()
{
if (_throwOnError)
{
- throw new TypeLoadException(SR.Format(SR.TypeLoad_ResolveType, EscapeTypeName(typeName)));
+ throw new TypeLoadException(SR.Format(SR.TypeLoad_ResolveType, escapedTypeName));
}
return null;
}
- return GetTypeFromDefaultAssemblies(typeName, nestedTypeNames);
+ return GetTypeFromDefaultAssemblies(UnescapeTypeName(escapedTypeName), nestedTypeNames, parsedName);
}
if (assembly is RuntimeAssembly runtimeAssembly)
{
+ string unescapedTypeName = UnescapeTypeName(escapedTypeName);
// Compat: Non-extensible parser allows ambiguous matches with ignore case lookup
if (!_extensibleParser || !_ignoreCase)
{
- return runtimeAssembly.GetTypeCore(typeName, nestedTypeNames, throwOnError: _throwOnError, ignoreCase: _ignoreCase);
+ return runtimeAssembly.GetTypeCore(unescapedTypeName, nestedTypeNames, throwOnError: _throwOnError, ignoreCase: _ignoreCase);
}
- type = runtimeAssembly.GetTypeCore(typeName, default, throwOnError: _throwOnError, ignoreCase: _ignoreCase);
+ type = runtimeAssembly.GetTypeCore(unescapedTypeName, default, throwOnError: _throwOnError, ignoreCase: _ignoreCase);
}
else
{
// This is a third-party Assembly object. Emulate GetTypeCore() by calling the public GetType()
// method. This is wasteful because it'll probably reparse a type string that we've already parsed
// but it can't be helped.
- type = assembly.GetType(EscapeTypeName(typeName), throwOnError: _throwOnError, ignoreCase: _ignoreCase);
+ type = assembly.GetType(escapedTypeName, throwOnError: _throwOnError, ignoreCase: _ignoreCase);
}
if (type is null)
@@ -257,7 +268,7 @@ private bool CheckTopLevelAssemblyQualifiedName()
if (_throwOnError)
{
throw new TypeLoadException(SR.Format(SR.TypeLoad_ResolveNestedType,
- nestedTypeNames[i], (i > 0) ? nestedTypeNames[i - 1] : typeName));
+ nestedTypeNames[i], (i > 0) ? nestedTypeNames[i - 1] : escapedTypeName));
}
return null;
}
@@ -266,12 +277,12 @@ private bool CheckTopLevelAssemblyQualifiedName()
return type;
}
- private Type? GetTypeFromDefaultAssemblies(string typeName, ReadOnlySpan nestedTypeNames)
+ private Type? GetTypeFromDefaultAssemblies(string typeName, ReadOnlySpan nestedTypeNames, Metadata.TypeName parsedName)
{
RuntimeAssembly? requestingAssembly = (RuntimeAssembly?)_requestingAssembly;
if (requestingAssembly is not null)
{
- Type? type = ((RuntimeAssembly)requestingAssembly).GetTypeCore(typeName, nestedTypeNames, throwOnError: false, ignoreCase: _ignoreCase);
+ Type? type = requestingAssembly.GetTypeCore(typeName, nestedTypeNames, throwOnError: false, ignoreCase: _ignoreCase);
if (type is not null)
return type;
}
@@ -279,12 +290,12 @@ private bool CheckTopLevelAssemblyQualifiedName()
RuntimeAssembly coreLib = (RuntimeAssembly)typeof(object).Assembly;
if (requestingAssembly != coreLib)
{
- Type? type = ((RuntimeAssembly)coreLib).GetTypeCore(typeName, nestedTypeNames, throwOnError: false, ignoreCase: _ignoreCase);
+ Type? type = coreLib.GetTypeCore(typeName, nestedTypeNames, throwOnError: false, ignoreCase: _ignoreCase);
if (type is not null)
return type;
}
- RuntimeAssembly? resolvedAssembly = AssemblyLoadContext.OnTypeResolve(requestingAssembly, EscapeTypeName(typeName, nestedTypeNames));
+ RuntimeAssembly? resolvedAssembly = AssemblyLoadContext.OnTypeResolve(requestingAssembly, parsedName.FullName);
if (resolvedAssembly is not null)
{
Type? type = resolvedAssembly.GetTypeCore(typeName, nestedTypeNames, throwOnError: false, ignoreCase: _ignoreCase);
@@ -293,7 +304,7 @@ private bool CheckTopLevelAssemblyQualifiedName()
}
if (_throwOnError)
- throw new TypeLoadException(SR.Format(SR.TypeLoad_ResolveTypeFromAssembly, EscapeTypeName(typeName), (requestingAssembly ?? coreLib).FullName));
+ throw new TypeLoadException(SR.Format(SR.TypeLoad_ResolveTypeFromAssembly, parsedName.FullName, (requestingAssembly ?? coreLib).FullName));
return null;
}
diff --git a/src/coreclr/System.Private.CoreLib/src/System/RuntimeHandles.cs b/src/coreclr/System.Private.CoreLib/src/System/RuntimeHandles.cs
index 001a9fcdfee6a8..380981993451e9 100644
--- a/src/coreclr/System.Private.CoreLib/src/System/RuntimeHandles.cs
+++ b/src/coreclr/System.Private.CoreLib/src/System/RuntimeHandles.cs
@@ -650,14 +650,6 @@ internal static bool SatisfiesConstraints(RuntimeType paramType, RuntimeType[]?
}
}
- [MethodImpl(MethodImplOptions.InternalCall)]
- private static extern IntPtr _GetMetadataImport(RuntimeType type);
-
- internal static MetadataImport GetMetadataImport(RuntimeType type)
- {
- return new MetadataImport(_GetMetadataImport(type), type);
- }
-
[LibraryImport(RuntimeHelpers.QCall, EntryPoint = "RuntimeTypeHandle_RegisterCollectibleTypeDependency")]
private static partial void RegisterCollectibleTypeDependency(QCallTypeHandle type, QCallAssembly assembly);
@@ -1247,8 +1239,6 @@ internal ModuleHandle(RuntimeModule module)
}
#endregion
- #region Internal FCalls
-
internal RuntimeModule GetRuntimeModule()
{
return m_ptr;
@@ -1278,6 +1268,7 @@ public bool Equals(ModuleHandle handle)
public static bool operator !=(ModuleHandle left, ModuleHandle right) => !left.Equals(right);
+ #region Internal FCalls
[MethodImpl(MethodImplOptions.InternalCall)]
internal static extern IRuntimeMethodInfo GetDynamicMethod(Reflection.Emit.DynamicMethod method, RuntimeModule module, string name, byte[] sig, Resolver resolver);
@@ -1336,7 +1327,7 @@ public RuntimeTypeHandle ResolveTypeHandle(int typeToken, RuntimeTypeHandle[]? t
}
catch (Exception)
{
- if (!GetMetadataImport(module).IsValidToken(typeToken))
+ if (!module.MetadataImport.IsValidToken(typeToken))
throw new ArgumentOutOfRangeException(nameof(typeToken),
SR.Format(SR.Argument_InvalidToken, typeToken, new ModuleHandle(module)));
throw;
@@ -1389,7 +1380,7 @@ internal static RuntimeMethodHandleInternal ResolveMethodHandleInternal(RuntimeM
}
catch (Exception)
{
- if (!GetMetadataImport(module).IsValidToken(methodToken))
+ if (!module.MetadataImport.IsValidToken(methodToken))
throw new ArgumentOutOfRangeException(nameof(methodToken),
SR.Format(SR.Argument_InvalidToken, methodToken, new ModuleHandle(module)));
throw;
@@ -1442,7 +1433,7 @@ public RuntimeFieldHandle ResolveFieldHandle(int fieldToken, RuntimeTypeHandle[]
}
catch (Exception)
{
- if (!GetMetadataImport(module).IsValidToken(fieldToken))
+ if (!module.MetadataImport.IsValidToken(fieldToken))
throw new ArgumentOutOfRangeException(nameof(fieldToken),
SR.Format(SR.Argument_InvalidToken, fieldToken, new ModuleHandle(module)));
throw;
@@ -1485,14 +1476,6 @@ internal static void GetPEKind(RuntimeModule module, out PortableExecutableKinds
internal static extern int GetMDStreamVersion(RuntimeModule module);
public int MDStreamVersion => GetMDStreamVersion(GetRuntimeModule());
-
- [MethodImpl(MethodImplOptions.InternalCall)]
- private static extern IntPtr _GetMetadataImport(RuntimeModule module);
-
- internal static MetadataImport GetMetadataImport(RuntimeModule module)
- {
- return new MetadataImport(_GetMetadataImport(module), module);
- }
#endregion
}
diff --git a/src/coreclr/System.Private.CoreLib/src/System/RuntimeType.CoreCLR.cs b/src/coreclr/System.Private.CoreLib/src/System/RuntimeType.CoreCLR.cs
index 56ff2e26850ccc..c6ea76fde9b85f 100644
--- a/src/coreclr/System.Private.CoreLib/src/System/RuntimeType.CoreCLR.cs
+++ b/src/coreclr/System.Private.CoreLib/src/System/RuntimeType.CoreCLR.cs
@@ -932,7 +932,8 @@ private void PopulateLiteralFields(Filter filter, RuntimeType declaringType, ref
if (MdToken.IsNullToken(tkDeclaringType))
return;
- MetadataImport scope = RuntimeTypeHandle.GetMetadataImport(declaringType);
+ RuntimeModule module = declaringType.GetRuntimeModule();
+ MetadataImport scope = module.MetadataImport;
scope.EnumFields(tkDeclaringType, out MetadataEnumResult tkFields);
@@ -976,6 +977,7 @@ private void PopulateLiteralFields(Filter filter, RuntimeType declaringType, ref
list.Add(runtimeFieldInfo);
}
}
+ GC.KeepAlive(module);
}
private void AddSpecialInterface(
@@ -1102,7 +1104,7 @@ private RuntimeType[] PopulateNestedClasses(Filter filter)
ListBuilder list = default;
ModuleHandle moduleHandle = new ModuleHandle(RuntimeTypeHandle.GetModule(declaringType));
- MetadataImport scope = ModuleHandle.GetMetadataImport(moduleHandle.GetRuntimeModule());
+ MetadataImport scope = moduleHandle.GetRuntimeModule().MetadataImport;
scope.EnumNestedTypes(tkEnclosingType, out MetadataEnumResult tkNestedClasses);
@@ -1174,7 +1176,8 @@ private void PopulateEvents(
if (MdToken.IsNullToken(tkDeclaringType))
return;
- MetadataImport scope = RuntimeTypeHandle.GetMetadataImport(declaringType);
+ RuntimeModule module = declaringType.GetRuntimeModule();
+ MetadataImport scope = module.MetadataImport;
scope.EnumEvents(tkDeclaringType, out MetadataEnumResult tkEvents);
@@ -1220,6 +1223,7 @@ private void PopulateEvents(
list.Add(eventInfo);
}
+ GC.KeepAlive(module);
}
private RuntimePropertyInfo[] PopulateProperties(Filter filter)
@@ -1286,7 +1290,8 @@ private void PopulateProperties(
if (MdToken.IsNullToken(tkDeclaringType))
return;
- MetadataImport scope = RuntimeTypeHandle.GetMetadataImport(declaringType);
+ RuntimeModule module = declaringType.GetRuntimeModule();
+ MetadataImport scope = module.MetadataImport;
scope.EnumProperties(tkDeclaringType, out MetadataEnumResult tkProperties);
@@ -1304,7 +1309,7 @@ private void PopulateProperties(
if (filter.RequiresStringComparison())
{
- MdUtf8String name = declaringType.GetRuntimeModule().MetadataImport.GetName(tkProperty);
+ MdUtf8String name = scope.GetName(tkProperty);
if (!filter.Match(name))
continue;
@@ -1399,6 +1404,7 @@ private void PopulateProperties(
list.Add(propertyInfo);
}
+ GC.KeepAlive(module);
}
#endregion
@@ -1571,7 +1577,9 @@ internal Type[] FunctionPointerReturnAndParameterTypes
while (type.IsNested)
type = type.DeclaringType!;
- m_namespace = RuntimeTypeHandle.GetMetadataImport((RuntimeType)type).GetNamespace(type.MetadataToken).ToString();
+ RuntimeModule module = ((RuntimeType)type).GetRuntimeModule();
+ m_namespace = module.MetadataImport.GetNamespace(type.MetadataToken).ToString();
+ GC.KeepAlive(module);
}
return m_namespace;
@@ -3499,8 +3507,9 @@ public override GenericParameterAttributes GenericParameterAttributes
if (!IsGenericParameter)
throw new InvalidOperationException(SR.Arg_NotGenericParameter);
-
- RuntimeTypeHandle.GetMetadataImport(this).GetGenericParamProps(MetadataToken, out GenericParameterAttributes attributes);
+ RuntimeModule module = GetRuntimeModule();
+ module.MetadataImport.GetGenericParamProps(MetadataToken, out GenericParameterAttributes attributes);
+ GC.KeepAlive(module);
return attributes;
}
diff --git a/src/coreclr/System.Private.CoreLib/src/System/Type.CoreCLR.cs b/src/coreclr/System.Private.CoreLib/src/System/Type.CoreCLR.cs
index 03a92b709eb1a5..81caecb09c99aa 100644
--- a/src/coreclr/System.Private.CoreLib/src/System/Type.CoreCLR.cs
+++ b/src/coreclr/System.Private.CoreLib/src/System/Type.CoreCLR.cs
@@ -4,7 +4,6 @@
using System.Diagnostics.CodeAnalysis;
using System.Reflection;
using System.Runtime.CompilerServices;
-using System.Runtime.Versioning;
using System.Security;
using StackCrawlMark = System.Threading.StackCrawlMark;
diff --git a/src/coreclr/clrdefinitions.cmake b/src/coreclr/clrdefinitions.cmake
index 4201c06692eeb5..1970b6c33c7544 100644
--- a/src/coreclr/clrdefinitions.cmake
+++ b/src/coreclr/clrdefinitions.cmake
@@ -16,6 +16,7 @@ elseif (CLR_CMAKE_TARGET_ARCH_ARM)
add_definitions(-DFEATURE_EMULATE_SINGLESTEP)
elseif (CLR_CMAKE_TARGET_ARCH_RISCV64)
add_definitions(-DFEATURE_EMULATE_SINGLESTEP)
+ add_compile_definitions($<$>>:FEATURE_MULTIREG_RETURN>)
endif (CLR_CMAKE_TARGET_ARCH_ARM64)
if (CLR_CMAKE_TARGET_UNIX)
diff --git a/src/coreclr/debug/daccess/cdac.cpp b/src/coreclr/debug/daccess/cdac.cpp
index 78625bf67f2d72..b7bb7585e6bcf7 100644
--- a/src/coreclr/debug/daccess/cdac.cpp
+++ b/src/coreclr/debug/daccess/cdac.cpp
@@ -28,8 +28,12 @@ namespace
int ReadFromTargetCallback(uint64_t addr, uint8_t* dest, uint32_t count, void* context)
{
- CDAC* cdac = reinterpret_cast(context);
- return cdac->ReadFromTarget(addr, dest, count);
+ ICorDebugDataTarget* target = reinterpret_cast(context);
+ HRESULT hr = ReadFromDataTarget(target, addr, dest, count);
+ if (FAILED(hr))
+ return hr;
+
+ return S_OK;
}
}
@@ -37,32 +41,37 @@ CDAC CDAC::Create(uint64_t descriptorAddr, ICorDebugDataTarget* target)
{
HMODULE cdacLib;
if (!TryLoadCDACLibrary(&cdacLib))
- return CDAC::Invalid();
+ return {};
+
+ decltype(&cdac_reader_init) init = reinterpret_cast(::GetProcAddress(cdacLib, "cdac_reader_init"));
+ _ASSERTE(init != nullptr);
+
+ intptr_t handle;
+ if (init(descriptorAddr, &ReadFromTargetCallback, target, &handle) != 0)
+ {
+ ::FreeLibrary(cdacLib);
+ return {};
+ }
- return CDAC{cdacLib, descriptorAddr, target};
+ return CDAC{cdacLib, handle, target};
}
-CDAC::CDAC(HMODULE module, uint64_t descriptorAddr, ICorDebugDataTarget* target)
- : m_module(module)
+CDAC::CDAC(HMODULE module, intptr_t handle, ICorDebugDataTarget* target)
+ : m_module{module}
+ , m_cdac_handle{handle}
, m_target{target}
{
- if (m_module == NULL)
- {
- m_cdac_handle = NULL;
- return;
- }
+ _ASSERTE(m_module != NULL && m_cdac_handle != 0 && m_target != NULL);
- decltype(&cdac_reader_init) init = reinterpret_cast(::GetProcAddress(m_module, "cdac_reader_init"));
+ m_target->AddRef();
decltype(&cdac_reader_get_sos_interface) getSosInterface = reinterpret_cast(::GetProcAddress(m_module, "cdac_reader_get_sos_interface"));
- _ASSERTE(init != nullptr && getSosInterface != nullptr);
-
- init(descriptorAddr, &ReadFromTargetCallback, this, &m_cdac_handle);
+ _ASSERTE(getSosInterface != nullptr);
getSosInterface(m_cdac_handle, &m_sos);
}
CDAC::~CDAC()
{
- if (m_cdac_handle != NULL)
+ if (m_cdac_handle)
{
decltype(&cdac_reader_free) free = reinterpret_cast(::GetProcAddress(m_module, "cdac_reader_free"));
_ASSERTE(free != nullptr);
@@ -77,12 +86,3 @@ IUnknown* CDAC::SosInterface()
{
return m_sos;
}
-
-int CDAC::ReadFromTarget(uint64_t addr, uint8_t* dest, uint32_t count)
-{
- HRESULT hr = ReadFromDataTarget(m_target, addr, dest, count);
- if (FAILED(hr))
- return hr;
-
- return S_OK;
-}
diff --git a/src/coreclr/debug/daccess/cdac.h b/src/coreclr/debug/daccess/cdac.h
index 54418dc549f1f0..51078ffcf2e46b 100644
--- a/src/coreclr/debug/daccess/cdac.h
+++ b/src/coreclr/debug/daccess/cdac.h
@@ -9,19 +9,16 @@ class CDAC final
public: // static
static CDAC Create(uint64_t descriptorAddr, ICorDebugDataTarget *pDataTarget);
- static CDAC Invalid()
- {
- return CDAC{nullptr, 0, nullptr};
- }
-
public:
+ CDAC() = default;
+
CDAC(const CDAC&) = delete;
CDAC& operator=(const CDAC&) = delete;
CDAC(CDAC&& other)
: m_module{ other.m_module }
, m_cdac_handle{ other.m_cdac_handle }
- , m_target{ other.m_target }
+ , m_target{ other.m_target.Extract() }
, m_sos{ other.m_sos.Extract() }
{
other.m_module = NULL;
@@ -34,7 +31,7 @@ class CDAC final
{
m_module = other.m_module;
m_cdac_handle = other.m_cdac_handle;
- m_target = other.m_target;
+ m_target = other.m_target.Extract();
m_sos = other.m_sos.Extract();
other.m_module = NULL;
@@ -54,15 +51,14 @@ class CDAC final
// This does not AddRef the returned interface
IUnknown* SosInterface();
- int ReadFromTarget(uint64_t addr, uint8_t* dest, uint32_t count);
private:
- CDAC(HMODULE module, uint64_t descriptorAddr, ICorDebugDataTarget* target);
+ CDAC(HMODULE module, intptr_t handle, ICorDebugDataTarget* target);
private:
HMODULE m_module;
intptr_t m_cdac_handle;
- ICorDebugDataTarget* m_target;
+ NonVMComHolder m_target;
NonVMComHolder m_sos;
};
diff --git a/src/coreclr/debug/daccess/daccess.cpp b/src/coreclr/debug/daccess/daccess.cpp
index 2f08750bc5c61b..37e7d37edd9f9c 100644
--- a/src/coreclr/debug/daccess/daccess.cpp
+++ b/src/coreclr/debug/daccess/daccess.cpp
@@ -30,6 +30,8 @@
#include
#else
extern "C" bool TryGetSymbol(ICorDebugDataTarget* dataTarget, uint64_t baseAddress, const char* symbolName, uint64_t* symbolAddress);
+// cDAC depends on symbol lookup to find the contract descriptor
+#define CAN_USE_CDAC
#endif
#include "dwbucketmanager.hpp"
@@ -3036,7 +3038,7 @@ class DacStreamManager
//----------------------------------------------------------------------------
ClrDataAccess::ClrDataAccess(ICorDebugDataTarget * pTarget, ICLRDataTarget * pLegacyTarget/*=0*/)
- : m_cdac{CDAC::Invalid()}
+ : m_cdac{}
{
SUPPORTS_DAC_HOST_ONLY; // ctor does no marshalling - don't check with DacCop
@@ -5493,15 +5495,16 @@ ClrDataAccess::Initialize(void)
IfFailRet(GetDacGlobalValues());
IfFailRet(DacGetHostVtPtrs());
+// TODO: [cdac] TryGetSymbol is only implemented for Linux, OSX, and Windows.
+#ifdef CAN_USE_CDAC
CLRConfigNoCache enable = CLRConfigNoCache::Get("ENABLE_CDAC");
if (enable.IsSet())
{
DWORD val;
if (enable.TryAsInteger(10, val) && val == 1)
{
- // TODO: [cdac] Get contract descriptor from exported symbol
uint64_t contractDescriptorAddr = 0;
- //if (TryGetSymbol(m_pTarget, m_globalBase, "DotNetRuntimeContractDescriptor", &contractDescriptorAddr))
+ if (TryGetSymbol(m_pTarget, m_globalBase, "DotNetRuntimeContractDescriptor", &contractDescriptorAddr))
{
m_cdac = CDAC::Create(contractDescriptorAddr, m_pTarget);
if (m_cdac.IsValid())
@@ -5514,6 +5517,7 @@ ClrDataAccess::Initialize(void)
}
}
}
+#endif
//
// DAC is now setup and ready to use
@@ -6946,7 +6950,7 @@ GetDacTableAddress(ICorDebugDataTarget* dataTarget, ULONG64 baseAddress, PULONG6
return E_INVALIDARG;
}
#endif
- // On MacOS, FreeBSD or NetBSD use the RVA include file
+ // On FreeBSD, NetBSD, or SunOS use the RVA include file
*dacTableAddress = baseAddress + DAC_TABLE_RVA;
#else
// Otherwise, try to get the dac table address via the export symbol
diff --git a/src/coreclr/debug/di/divalue.cpp b/src/coreclr/debug/di/divalue.cpp
index 285da47437ebc3..853bac550e4554 100644
--- a/src/coreclr/debug/di/divalue.cpp
+++ b/src/coreclr/debug/di/divalue.cpp
@@ -1840,6 +1840,10 @@ HRESULT CordbObjectValue::QueryInterface(REFIID id, void **pInterface)
{
*pInterface = static_cast(static_cast(this));
}
+ else if (id == IID_ICorDebugExceptionObjectValue2 && m_fIsExceptionObject)
+ {
+ *pInterface = static_cast(static_cast(this));
+ }
else if (id == IID_ICorDebugComObjectValue && m_fIsRcw)
{
*pInterface = static_cast(this);
@@ -2489,6 +2493,41 @@ HRESULT CordbObjectValue::EnumerateExceptionCallStack(ICorDebugExceptionObjectCa
return hr;
}
+HRESULT CordbObjectValue::ForceCatchHandlerFoundEvents(BOOL enableEvents)
+{
+ PUBLIC_API_ENTRY(this);
+ FAIL_IF_NEUTERED(this);
+ ATT_REQUIRE_STOPPED_MAY_FAIL(GetProcess());
+
+ HRESULT hr = S_OK;
+
+ EX_TRY
+ {
+ CordbProcess * pProcess = GetProcess();
+
+ CORDB_ADDRESS objAddr = m_valueHome.GetAddress();
+
+ IDacDbiInterface* pDAC = GetProcess()->GetDAC();
+ VMPTR_Object vmObj = pDAC->GetObject(objAddr);
+
+ DebuggerIPCEvent event;
+ CordbAppDomain * pAppDomain = GetAppDomain();
+ _ASSERTE (pAppDomain != NULL);
+
+ pProcess->InitIPCEvent(&event, DB_IPCE_FORCE_CATCH_HANDLER_FOUND, true, pAppDomain->GetADToken());
+ event.ForceCatchHandlerFoundData.enableEvents = enableEvents;
+ event.ForceCatchHandlerFoundData.vmObj = vmObj;
+
+ hr = pProcess->m_cordb->SendIPCEvent(pProcess, &event, sizeof(DebuggerIPCEvent));
+
+ _ASSERTE(event.type == DB_IPCE_CATCH_HANDLER_FOUND_RESULT);
+
+ hr = event.hr;
+ }
+ EX_CATCH_HRESULT(hr);
+ return hr;
+}
+
HRESULT CordbObjectValue::IsExceptionObject()
{
HRESULT hr = S_OK;
diff --git a/src/coreclr/debug/di/process.cpp b/src/coreclr/debug/di/process.cpp
index 9920ed2dce0146..67f1a963a53d52 100644
--- a/src/coreclr/debug/di/process.cpp
+++ b/src/coreclr/debug/di/process.cpp
@@ -10734,6 +10734,7 @@ HRESULT CordbRCEventThread::WaitForIPCEventFromProcess(CordbProcess * pProcess,
CordbAppDomain * pAppDomain,
DebuggerIPCEvent * pEvent)
{
+
CORDBRequireProcessStateOKAndSync(pProcess, pAppDomain);
DWORD dwStatus;
diff --git a/src/coreclr/debug/di/rspriv.h b/src/coreclr/debug/di/rspriv.h
index 4b3fff25ce69b2..9c6a98776d1cca 100644
--- a/src/coreclr/debug/di/rspriv.h
+++ b/src/coreclr/debug/di/rspriv.h
@@ -9170,6 +9170,7 @@ class CordbObjectValue : public CordbValue,
public ICorDebugHeapValue3,
public ICorDebugHeapValue4,
public ICorDebugExceptionObjectValue,
+ public ICorDebugExceptionObjectValue2,
public ICorDebugComObjectValue,
public ICorDebugDelegateObjectValue
{
@@ -9292,6 +9293,11 @@ class CordbObjectValue : public CordbValue,
//-----------------------------------------------------------
COM_METHOD EnumerateExceptionCallStack(ICorDebugExceptionObjectCallStackEnum** ppCallStackEnum);
+ //-----------------------------------------------------------
+ // ICorDebugExceptionObjectValue2
+ //-----------------------------------------------------------
+ COM_METHOD ForceCatchHandlerFoundEvents(BOOL enableEvents);
+
//-----------------------------------------------------------
// ICorDebugComObjectValue
//-----------------------------------------------------------
diff --git a/src/coreclr/debug/ee/debugger.cpp b/src/coreclr/debug/ee/debugger.cpp
index 7a5028e1fc2018..3723ab2c90354e 100644
--- a/src/coreclr/debug/ee/debugger.cpp
+++ b/src/coreclr/debug/ee/debugger.cpp
@@ -915,6 +915,7 @@ Debugger::Debugger()
m_unrecoverableError(FALSE),
m_ignoreThreadDetach(FALSE),
m_pMethodInfos(NULL),
+ m_pForceCatchHandlerFoundEventsTable(NULL),
m_mutex(CrstDebuggerMutex, (CrstFlags)(CRST_UNSAFE_ANYMODE | CRST_REENTRANCY | CRST_DEBUGGER_THREAD)),
#ifdef _DEBUG
m_mutexOwner(0),
@@ -956,8 +957,7 @@ Debugger::Debugger()
m_processId = GetCurrentProcessId();
- // Initialize these in ctor because we free them in dtor.
- // And we can't set them to some safe uninited value (like NULL).
+ m_pForceCatchHandlerFoundEventsTable = new ForceCatchHandlerFoundTable();
@@ -7448,8 +7448,8 @@ void Debugger::SendExceptionEventsWorker(
g_pDebugger->IncThreadsAtUnsafePlaces();
}
} // end of GCX_CCOP_EEINTERFACE();
- } //end if (m_sendExceptionsOutsideOfJMC && !SentDebugFirstChance())
+ } //end if (m_sendExceptionsOutsideOfJMC && !SentDebugFirstChance())
//
// If this is a JMC function, then we send a USER's first chance as well.
//
@@ -7846,11 +7846,14 @@ void Debugger::FirstChanceManagedExceptionCatcherFound(Thread *pThread,
}
}
+ BOOL forceSendCatchHandlerFound = FALSE;
+ {
+ GCX_COOP_EEINTERFACE();
+ forceSendCatchHandlerFound = ShouldSendCatchHandlerFound(pThread);
+ }
// Here we check if debugger opted-out of receiving exception related events from outside of JMC methods
// or this exception ever crossed JMC frame (in this case we have already sent user first chance event)
- if (m_sendExceptionsOutsideOfJMC ||
- isInJMCFunction ||
- pThread->GetExceptionState()->GetFlags()->SentDebugUserFirstChance())
+ if (isInJMCFunction || forceSendCatchHandlerFound)
{
if (pDebugJitInfo != NULL)
{
@@ -7979,9 +7982,15 @@ LONG Debugger::NotifyOfCHFFilter(EXCEPTION_POINTERS* pExceptionPointers, PVOID p
pExceptionPointers);
}
+ BOOL forceSendCatchHandlerFound = FALSE;
+ {
+ GCX_COOP_EEINTERFACE();
+ forceSendCatchHandlerFound = ShouldSendCatchHandlerFound(pThread);
+ }
+
// Here we check if debugger opted-out of receiving exception related events from outside of JMC methods
// or this exception ever crossed JMC frame (in this case we have already sent user first chance event)
- if (m_sendExceptionsOutsideOfJMC || pExState->GetFlags()->SentDebugUserFirstChance())
+ if (forceSendCatchHandlerFound)
{
SendCatchHandlerFound(pThread, fp, offset, dwFlags);
}
@@ -8008,6 +8017,34 @@ LONG Debugger::NotifyOfCHFFilter(EXCEPTION_POINTERS* pExceptionPointers, PVOID p
return EXCEPTION_CONTINUE_SEARCH;
}
+BOOL Debugger::ShouldSendCatchHandlerFound(Thread* pThread)
+{
+ CONTRACTL
+ {
+ THROWS;
+ GC_NOTRIGGER;
+ MODE_COOPERATIVE;
+ }
+ CONTRACTL_END;
+
+ ThreadExceptionState* pExState = pThread->GetExceptionState();
+ if (m_sendExceptionsOutsideOfJMC || pExState->GetFlags()->SentDebugUserFirstChance())
+ {
+ return TRUE;
+ }
+ else
+ {
+ BOOL forceSendCatchHandlerFound = FALSE;
+ OBJECTHANDLE objHandle = pThread->GetThrowableAsHandle();
+ OBJECTHANDLE retrievedHandle = m_pForceCatchHandlerFoundEventsTable->Lookup(objHandle); //destroy handle
+ if (retrievedHandle != NULL)
+ {
+ forceSendCatchHandlerFound = TRUE;
+ }
+ return forceSendCatchHandlerFound;
+ }
+}
+
// Actually send the catch handler found event.
// This can be used to send CHF for both regular managed catchers as well
@@ -10427,6 +10464,27 @@ bool Debugger::HandleIPCEvent(DebuggerIPCEvent * pEvent)
}
break;
+ case DB_IPCE_FORCE_CATCH_HANDLER_FOUND:
+ {
+ BOOL enableEvents = pEvent->ForceCatchHandlerFoundData.enableEvents;
+ AppDomain *pAppDomain = pEvent->vmAppDomain.GetRawPtr();
+ OBJECTREF exObj = ObjectToOBJECTREF(pEvent->ForceCatchHandlerFoundData.vmObj.GetRawPtr());
+ HRESULT hr = E_INVALIDARG;
+
+ hr = UpdateForceCatchHandlerFoundTable(enableEvents, exObj, pAppDomain);
+
+ DebuggerIPCEvent * pIPCResult = m_pRCThread->GetIPCEventReceiveBuffer();
+ InitIPCEvent(pIPCResult,
+ DB_IPCE_CATCH_HANDLER_FOUND_RESULT,
+ g_pEEInterface->GetThread(),
+ pEvent->vmAppDomain);
+
+ pIPCResult->hr = hr;
+
+ m_pRCThread->SendIPCReply();
+ }
+ break;
+
case DB_IPCE_BREAKPOINT_ADD:
{
@@ -12350,6 +12408,44 @@ HRESULT Debugger::IsMethodDeoptimized(Module *pModule, mdMethodDef methodDef, BO
return S_OK;
}
+HRESULT Debugger::UpdateForceCatchHandlerFoundTable(BOOL enableEvents, OBJECTREF exObj, AppDomain *pAppDomain)
+{
+ CONTRACTL
+ {
+ THROWS;
+ CAN_TAKE_LOCK;
+ GC_NOTRIGGER;
+ }
+ CONTRACTL_END;
+
+ OBJECTHANDLE objHandle = pAppDomain->CreateLongWeakHandle(exObj);
+ if (objHandle == NULL)
+ {
+ return E_INVALIDARG;
+ }
+ if (enableEvents)
+ {
+ OBJECTHANDLE objHandleFound = m_pForceCatchHandlerFoundEventsTable->Lookup(objHandle);
+ if (objHandleFound == NULL)
+ {
+ m_pForceCatchHandlerFoundEventsTable->Add(objHandle);
+ }
+ else
+ {
+ DestroyLongWeakHandle(objHandle);
+ }
+ }
+ else
+ {
+ if (m_pForceCatchHandlerFoundEventsTable->Lookup(objHandle) != NULL)
+ {
+ m_pForceCatchHandlerFoundEventsTable->Remove(objHandle);
+ }
+ DestroyLongWeakHandle(objHandle);
+ }
+ return S_OK;
+}
+
//
// UnrecoverableError causes the Left Side to enter a state where no more
// debugging can occur and we leave around enough information for the
diff --git a/src/coreclr/debug/ee/debugger.h b/src/coreclr/debug/ee/debugger.h
index 38bbb73d2932e4..d66307fb146422 100644
--- a/src/coreclr/debug/ee/debugger.h
+++ b/src/coreclr/debug/ee/debugger.h
@@ -535,6 +535,61 @@ struct DebuggerPendingFuncEval
typedef DPTR(struct DebuggerPendingFuncEval) PTR_DebuggerPendingFuncEval;
+/* ------------------------------------------------------------------------ *
+ * SHash to hold weak object handles of exceptions with ForceCatchHandlerFound equal to true
+ * ------------------------------------------------------------------------ */
+#ifndef DACCESS_COMPILE
+class EMPTY_BASES_DECL ForceCatchHandlerFoundSHashTraits : public DefaultSHashTraits
+{
+ public:
+ typedef OBJECTHANDLE element_t;
+ typedef OBJECTHANDLE key_t;
+ static const bool s_supports_autoremove = true;
+ static const bool s_NoThrow = false;
+ static const bool s_RemovePerEntryCleanupAction = true;
+
+ static BOOL Equals(const OBJECTHANDLE &e, const OBJECTHANDLE &f)
+ {
+ return ObjectFromHandle(e) == ObjectFromHandle(f);
+ }
+ static OBJECTHANDLE GetKey(const OBJECTHANDLE &e)
+ {
+ return e;
+ }
+ static INT32 Hash(const OBJECTHANDLE &e)
+ {
+ return ObjectFromHandle(e)->GetHashCodeEx();
+ }
+ static bool ShouldDelete(const OBJECTHANDLE &e)
+ {
+ return ObjectHandleIsNull(e);
+ }
+ static OBJECTHANDLE Null()
+ {
+ OBJECTHANDLE e = (OBJECTHANDLE)(TADDR)0;
+ return e;
+ }
+ static bool IsNull(const OBJECTHANDLE &e)
+ {
+ return e == (OBJECTHANDLE)(TADDR)0;
+ }
+ static OBJECTHANDLE Deleted()
+ {
+ OBJECTHANDLE e = (OBJECTHANDLE)(TADDR)-1;
+ return e;
+ }
+ static bool IsDeleted(const OBJECTHANDLE &e)
+ {
+ return e == (OBJECTHANDLE)(TADDR)-1;
+ }
+ static void OnRemovePerEntryCleanupAction(const OBJECTHANDLE &e)
+ {
+ DestroyLongWeakHandle(e);
+ }
+};
+typedef SHash ForceCatchHandlerFoundTable;
+#endif
+
/* ------------------------------------------------------------------------ *
* DebuggerRCThread class -- the Runtime Controller thread.
* ------------------------------------------------------------------------ */
@@ -1917,6 +1972,8 @@ class Debugger : public DebugInterface
Module *classModule,
BOOL fIsLoadEvent);
+ BOOL ShouldSendCatchHandlerFound(Thread* pThread);
+
void SendCatchHandlerFound(Thread *pThread,
FramePointer fp,
SIZE_T nOffset,
@@ -2218,6 +2275,7 @@ class Debugger : public DebugInterface
HRESULT DeoptimizeMethod(Module* pModule, mdMethodDef methodDef);
#endif //DACCESS_COMPILE
HRESULT IsMethodDeoptimized(Module *pModule, mdMethodDef methodDef, BOOL *pResult);
+ HRESULT UpdateForceCatchHandlerFoundTable(BOOL enableEvents, OBJECTREF exObj, AppDomain *pAppDomain);
//
// The debugger mutex is used to protect any "global" Left Side
@@ -2806,6 +2864,11 @@ class Debugger : public DebugInterface
BOOL m_unrecoverableError;
BOOL m_ignoreThreadDetach;
PTR_DebuggerMethodInfoTable m_pMethodInfos;
+ #ifdef DACCESS_COMPILE
+ VOID * m_pForceCatchHandlerFoundEventsTable;
+ #else
+ ForceCatchHandlerFoundTable *m_pForceCatchHandlerFoundEventsTable;
+ #endif
// This is the main debugger lock. It is a large lock and used to synchronize complex operations
@@ -3550,7 +3613,7 @@ inline void * __cdecl operator new[](size_t n, const InteropSafe&)
return result;
}
-inline void * __cdecl operator new(size_t n, const InteropSafe&, const NoThrow&) throw()
+inline void * __cdecl operator new(size_t n, const InteropSafe&, const std::nothrow_t&) noexcept
{
CONTRACTL
{
@@ -3569,7 +3632,7 @@ inline void * __cdecl operator new(size_t n, const InteropSafe&, const NoThrow&)
return result;
}
-inline void * __cdecl operator new[](size_t n, const InteropSafe&, const NoThrow&) throw()
+inline void * __cdecl operator new[](size_t n, const InteropSafe&, const std::nothrow_t&) noexcept
{
CONTRACTL
{
diff --git a/src/coreclr/debug/inc/dbgipcevents.h b/src/coreclr/debug/inc/dbgipcevents.h
index 68f18192c24ab4..661fcfdd334786 100644
--- a/src/coreclr/debug/inc/dbgipcevents.h
+++ b/src/coreclr/debug/inc/dbgipcevents.h
@@ -2054,6 +2054,12 @@ struct MSLAYOUT DebuggerIPCEvent
VMPTR_Module pModule;
} DisableOptData;
+ struct MSLAYOUT
+ {
+ BOOL enableEvents;
+ VMPTR_Object vmObj;
+ } ForceCatchHandlerFoundData;
+
struct MSLAYOUT
{
LSPTR_BREAKPOINT breakpointToken;
diff --git a/src/coreclr/debug/inc/dbgipceventtypes.h b/src/coreclr/debug/inc/dbgipceventtypes.h
index 9c3a09afcf9b41..650156fdda0613 100644
--- a/src/coreclr/debug/inc/dbgipceventtypes.h
+++ b/src/coreclr/debug/inc/dbgipceventtypes.h
@@ -93,7 +93,8 @@ IPC_EVENT_TYPE1(DB_IPCE_DATA_BREAKPOINT ,0x0160)
IPC_EVENT_TYPE1(DB_IPCE_BEFORE_GARBAGE_COLLECTION ,0x0161)
IPC_EVENT_TYPE1(DB_IPCE_AFTER_GARBAGE_COLLECTION ,0x0162)
IPC_EVENT_TYPE1(DB_IPCE_DISABLE_OPTS_RESULT ,0x0163)
-IPC_EVENT_TYPE0(DB_IPCE_RUNTIME_LAST ,0x0165) // The last event from runtime
+IPC_EVENT_TYPE1(DB_IPCE_CATCH_HANDLER_FOUND_RESULT ,0x0165)
+IPC_EVENT_TYPE0(DB_IPCE_RUNTIME_LAST ,0x0166) // The last event from runtime
@@ -143,5 +144,6 @@ IPC_EVENT_TYPE2(DB_IPCE_GET_GCHANDLE_INFO ,0x0251)
IPC_EVENT_TYPE2(DB_IPCE_RESOLVE_UPDATE_METADATA_1 ,0x0256)
IPC_EVENT_TYPE2(DB_IPCE_RESOLVE_UPDATE_METADATA_2 ,0x0257)
IPC_EVENT_TYPE2(DB_IPCE_DISABLE_OPTS ,0x0258)
-IPC_EVENT_TYPE0(DB_IPCE_DEBUGGER_LAST ,0x025A) // The last event from the debugger
+IPC_EVENT_TYPE2(DB_IPCE_FORCE_CATCH_HANDLER_FOUND ,0x025A)
+IPC_EVENT_TYPE0(DB_IPCE_DEBUGGER_LAST ,0x025B) // The last event from the debugger
diff --git a/src/coreclr/debug/runtimeinfo/CMakeLists.txt b/src/coreclr/debug/runtimeinfo/CMakeLists.txt
index e6d45ada120131..77ecf9a4dd9bad 100644
--- a/src/coreclr/debug/runtimeinfo/CMakeLists.txt
+++ b/src/coreclr/debug/runtimeinfo/CMakeLists.txt
@@ -37,3 +37,58 @@ endif()
# publish runtimeinfo lib
install_clr(TARGETS runtimeinfo DESTINATIONS lib COMPONENT runtime)
+
+
+# cDAC contract descriptor
+
+if (NOT CDAC_BUILD_TOOL_BINARY_PATH)
+ # if CDAC_BUILD_TOOL_BINARY_PATH is unspecified (for example for a build without a .NET SDK or msbuild),
+ # link a stub contract descriptor into the runtime
+ add_library_clr(cdac_contract_descriptor OBJECT contractdescriptorstub.c)
+ message(STATUS "Using a stub cDAC contract descriptor")
+else()
+ # generate a contract descriptor using cdac-build-tool from a data descriptor and contract json file
+
+ add_library(cdac_data_descriptor OBJECT datadescriptor.cpp)
+ # don't build the data descriptor before the VM (and any of its dependencies' generated headers)
+ add_dependencies(cdac_data_descriptor cee_wks_core)
+ if(CLR_CMAKE_TARGET_WIN32)
+ # turn off whole program optimization:
+ # 1. it creates object files that cdac-build-tool can't read
+ # 2. we never link cdac_data_descriptor into the final product - it's only job is to be scraped
+ target_compile_options(cdac_data_descriptor PRIVATE /GL-)
+ endif()
+ target_include_directories(cdac_data_descriptor BEFORE PRIVATE ${VM_DIR})
+ target_include_directories(cdac_data_descriptor BEFORE PRIVATE ${VM_DIR}/${ARCH_SOURCES_DIR})
+ target_include_directories(cdac_data_descriptor PRIVATE ${CLR_DIR}/interop/inc)
+
+ set(GENERATED_CDAC_DESCRIPTOR_DIR "${CMAKE_CURRENT_BINARY_DIR}/cdac")
+ set(CONTRACT_DESCRIPTOR_OUTPUT "${GENERATED_CDAC_DESCRIPTOR_DIR}/contract-descriptor.c")
+ if(NOT EXISTS "${CDAC_BUILD_TOOL_BINARY_PATH}")
+ message(FATAL_ERROR "${CDAC_BUILD_TOOL_BINARY_PATH} does not exist")
+ endif()
+
+ set(CONTRACT_FILE "${CMAKE_CURRENT_SOURCE_DIR}/contracts.jsonc")
+
+ # generate the contract descriptor by running cdac-build-tool
+ # n.b. this just uses `dotnet` from the PATH. InitializeDotNetCli adds the appropriate directory
+ add_custom_command(
+ OUTPUT "${CONTRACT_DESCRIPTOR_OUTPUT}"
+ VERBATIM
+ COMMAND ${CLR_DOTNET_HOST_PATH} ${CDAC_BUILD_TOOL_BINARY_PATH} compose -o "${CONTRACT_DESCRIPTOR_OUTPUT}" -c "${CONTRACT_FILE}" $<TARGET_OBJECTS:cdac_data_descriptor>
+ DEPENDS cdac_data_descriptor cee_wks_core $<TARGET_OBJECTS:cdac_data_descriptor> "${CONTRACT_FILE}"
+ USES_TERMINAL
+ )
+
+ # It is important that cdac_contract_descriptor is an object library;
+ # if it was static, linking it into the final dll would not export
+ # DotNetRuntimeContractDescriptor since it is not referenced anywhere.
+ add_library_clr(cdac_contract_descriptor OBJECT
+ "${CONTRACT_DESCRIPTOR_OUTPUT}"
+ contractpointerdata.cpp
+ )
+ target_include_directories(cdac_contract_descriptor BEFORE PRIVATE ${VM_DIR})
+ target_include_directories(cdac_contract_descriptor BEFORE PRIVATE ${VM_DIR}/${ARCH_SOURCES_DIR})
+ target_include_directories(cdac_contract_descriptor PRIVATE ${CLR_DIR}/interop/inc)
+ add_dependencies(cdac_contract_descriptor cdac_data_descriptor cee_wks_core)
+endif()
diff --git a/src/coreclr/debug/runtimeinfo/contractdescriptorstub.c b/src/coreclr/debug/runtimeinfo/contractdescriptorstub.c
new file mode 100644
index 00000000000000..59421a6692d2a7
--- /dev/null
+++ b/src/coreclr/debug/runtimeinfo/contractdescriptorstub.c
@@ -0,0 +1,39 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+#include <stdint.h>
+
+#ifdef _MSC_VER
+#define DLLEXPORT __declspec(dllexport)
+#else
+#define DLLEXPORT __attribute__((visibility("default")))
+#endif
+
+struct DotNetRuntimeContractDescriptor
+{
+ uint64_t magic;
+ uint32_t flags;
+ const uint32_t descriptor_size;
+ const char *descriptor;
+ const uint32_t pointer_data_count;
+ uint32_t pad0;
+ const uintptr_t *pointer_data;
+};
+
+extern const uintptr_t contractDescriptorPointerData[];
+
+// just the placeholder pointer
+const uintptr_t contractDescriptorPointerData[] = { (uintptr_t)0 };
+
+DLLEXPORT struct DotNetRuntimeContractDescriptor DotNetRuntimeContractDescriptor;
+
+#define STUB_DESCRIPTOR "{\"version\":0,\"baseline\":\"empty\",\"contracts\":{},\"types\":{},\"globals\":{}}"
+
+DLLEXPORT struct DotNetRuntimeContractDescriptor DotNetRuntimeContractDescriptor = {
+ .magic = 0x0043414443434e44ull, // "DNCCDAC\0"
+ .flags = 0x1u | (sizeof(void*) == 4 ? 0x02u : 0x00u),
+ .descriptor_size = sizeof(STUB_DESCRIPTOR),
+ .descriptor = STUB_DESCRIPTOR,
+ .pointer_data_count = 1,
+ .pointer_data = &contractDescriptorPointerData[0],
+};
diff --git a/src/coreclr/debug/runtimeinfo/contractpointerdata.cpp b/src/coreclr/debug/runtimeinfo/contractpointerdata.cpp
new file mode 100644
index 00000000000000..ae1440af4219a2
--- /dev/null
+++ b/src/coreclr/debug/runtimeinfo/contractpointerdata.cpp
@@ -0,0 +1,23 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+#include "common.h"
+
+#include <stdint.h>
+#include <stddef.h>
+
+#include "threads.h"
+
+extern "C"
+{
+
+// without an extern declaration, clang does not emit this global into the object file
+extern const uintptr_t contractDescriptorPointerData[];
+
+const uintptr_t contractDescriptorPointerData[] = {
+ (uintptr_t)0, // placeholder
+#define CDAC_GLOBAL_POINTER(name,value) (uintptr_t)(value),
+#include "datadescriptor.h"
+};
+
+}
diff --git a/src/coreclr/debug/runtimeinfo/contracts.jsonc b/src/coreclr/debug/runtimeinfo/contracts.jsonc
new file mode 100644
index 00000000000000..186230d5c68d6f
--- /dev/null
+++ b/src/coreclr/debug/runtimeinfo/contracts.jsonc
@@ -0,0 +1,14 @@
+//algorithmic contracts for coreclr
+// The format of this file is: JSON with comments
+// {
+// "CONTRACT NAME": VERSION,
+// ...
+// }
+// CONTRACT NAME is an arbitrary string, VERSION is an integer
+//
+// cdac-build-tool can take multiple "-c contract_file" arguments
+// so to conditionally include contracts, put additional contracts in a separate file
+{
+ "SOSBreakingChangeVersion": 1 // example contract: "runtime exports an SOS breaking change version global"
+}
+
diff --git a/src/coreclr/debug/runtimeinfo/datadescriptor.cpp b/src/coreclr/debug/runtimeinfo/datadescriptor.cpp
new file mode 100644
index 00000000000000..99fe1cca7eeca7
--- /dev/null
+++ b/src/coreclr/debug/runtimeinfo/datadescriptor.cpp
@@ -0,0 +1,297 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+#include "common.h"
+
+#include <stdint.h>
+#include <stddef.h>
+
+#include "static_assert.h"
+
+#include
+#include "threads.h"
+
+// begin blob definition
+
+extern "C"
+{
+
+struct TypeSpec
+{
+ uint32_t Name;
+ uint32_t Fields;
+ uint16_t Size; // note: C++ fragile no designated initializers - Size must come after Name and Fields
+};
+
+struct FieldSpec
+{
+ uint32_t Name;
+ uint32_t TypeName;
+ uint16_t FieldOffset;
+};
+
+struct GlobalLiteralSpec
+{
+ uint32_t Name;
+ uint32_t TypeName;
+ uint64_t Value;
+};
+
+struct GlobalPointerSpec
+{
+ uint32_t Name;
+ uint32_t PointerDataIndex;
+};
+
+#define CONCAT(token1,token2) token1 ## token2
+#define CONCAT4(token1, token2, token3, token4) token1 ## token2 ## token3 ## token4
+
+#define MAKE_TYPELEN_NAME(tyname) CONCAT(cdac_string_pool_typename__, tyname)
+#define MAKE_FIELDLEN_NAME(tyname,membername) CONCAT4(cdac_string_pool_membername__, tyname, __, membername)
+#define MAKE_FIELDTYPELEN_NAME(tyname,membername) CONCAT4(cdac_string_pool_membertypename__, tyname, __, membername)
+#define MAKE_GLOBALLEN_NAME(globalname) CONCAT(cdac_string_pool_globalname__, globalname)
+#define MAKE_GLOBALTYPELEN_NAME(globalname) CONCAT(cdac_string_pool_globaltypename__, globalname)
+
+// define a struct where the size of each field is the length of some string. we will use offsetof to get
+// the offset of each struct element, which will be equal to the offset of the beginning of that string in the
+// string pool.
+struct CDacStringPoolSizes
+{
+ char cdac_string_pool_nil; // make the first real string start at offset 1
+#define DECL_LEN(membername,len) char membername[(len)];
+#define CDAC_BASELINE(name) DECL_LEN(cdac_string_pool_baseline_, (sizeof(name)))
+#define CDAC_TYPE_BEGIN(name) DECL_LEN(MAKE_TYPELEN_NAME(name), sizeof(#name))
+#define CDAC_TYPE_FIELD(tyname,membertyname,membername,offset) DECL_LEN(MAKE_FIELDLEN_NAME(tyname,membername), sizeof(#membername)) \
+ DECL_LEN(MAKE_FIELDTYPELEN_NAME(tyname,membername), sizeof(#membertyname))
+#define CDAC_GLOBAL_POINTER(name,value) DECL_LEN(MAKE_GLOBALLEN_NAME(name), sizeof(#name))
+#define CDAC_GLOBAL(name,tyname,value) DECL_LEN(MAKE_GLOBALLEN_NAME(name), sizeof(#name)) \
+ DECL_LEN(MAKE_GLOBALTYPELEN_NAME(name), sizeof(#tyname))
+#include "datadescriptor.h"
+ char cdac_string_pool_trailing_nil;
+#undef DECL_LEN
+};
+
+#define GET_TYPE_NAME(name) offsetof(struct CDacStringPoolSizes, MAKE_TYPELEN_NAME(name))
+#define GET_FIELD_NAME(tyname,membername) offsetof(struct CDacStringPoolSizes, MAKE_FIELDLEN_NAME(tyname,membername))
+#define GET_FIELDTYPE_NAME(tyname,membername) offsetof(struct CDacStringPoolSizes, MAKE_FIELDTYPELEN_NAME(tyname,membername))
+#define GET_GLOBAL_NAME(globalname) offsetof(struct CDacStringPoolSizes, MAKE_GLOBALLEN_NAME(globalname))
+#define GET_GLOBALTYPE_NAME(globalname) offsetof(struct CDacStringPoolSizes, MAKE_GLOBALTYPELEN_NAME(globalname))
+
+// count the types
+enum
+{
+ CDacBlobTypesCount =
+#define CDAC_TYPES_BEGIN() 0
+#define CDAC_TYPE_BEGIN(name) + 1
+#include "datadescriptor.h"
+};
+
+// count the field pool size.
+// there's 1 placeholder element at the start, and 1 endmarker after each type
+enum
+{
+ CDacBlobFieldsPoolCount =
+#define CDAC_TYPES_BEGIN() 1
+#define CDAC_TYPE_FIELD(tyname,membertyname,membername,offset) + 1
+#define CDAC_TYPE_END(name) + 1
+#include "datadescriptor.h"
+};
+
+// count the literal globals
+enum
+{
+ CDacBlobGlobalLiteralsCount =
+#define CDAC_GLOBALS_BEGIN() 0
+#define CDAC_GLOBAL(name,tyname,value) + 1
+#include "datadescriptor.h"
+};
+
+// count the aux vector globals
+enum
+{
+ CDacBlobGlobalPointersCount =
+#define CDAC_GLOBALS_BEGIN() 0
+#define CDAC_GLOBAL_POINTER(name,value) + 1
+#include "datadescriptor.h"
+};
+
+
+#define MAKE_TYPEFIELDS_TYNAME(tyname) CONCAT(CDacFieldsPoolTypeStart__, tyname)
+
+// index of each run of fields.
+// we make a struct containing one 1-byte field for each field in the run, and then take the offset of the
+// struct to get the index of the run of fields.
+// this looks like
+//
+// struct CDacFieldsPoolSizes {
+// char cdac_fields_pool_start_placeholder__;
+// struct CDacFieldsPoolTypeStart__MethodTable {
+// char cdac_fields_pool_member__MethodTable__GCHandle;
+// char cdac_fields_pool_member__MethodTable_endmarker;
+// } CDacFieldsPoolTypeStart__MethodTable;
+// ...
+// };
+//
+// so that offsetof(struct CDacFieldsPoolSizes, CDacFieldsPoolTypeStart__MethodTable) will give the offset of the
+// method table field descriptors in the run of fields
+struct CDacFieldsPoolSizes
+{
+#define DECL_LEN(membername) char membername;
+#define CDAC_TYPES_BEGIN() DECL_LEN(cdac_fields_pool_start_placeholder__)
+#define CDAC_TYPE_BEGIN(name) struct MAKE_TYPEFIELDS_TYNAME(name) {
+#define CDAC_TYPE_FIELD(tyname,membertyname,membername,offset) DECL_LEN(CONCAT4(cdac_fields_pool_member__, tyname, __, membername))
+#define CDAC_TYPE_END(name) DECL_LEN(CONCAT4(cdac_fields_pool_member__, tyname, _, endmarker)) \
+ } MAKE_TYPEFIELDS_TYNAME(name);
+#include "datadescriptor.h"
+#undef DECL_LEN
+};
+
+#define GET_TYPE_FIELDS(tyname) offsetof(struct CDacFieldsPoolSizes, MAKE_TYPEFIELDS_TYNAME(tyname))
+
+// index of each global pointer
+//
+// struct CDacGlobalPointerIndex
+// {
+// char placeholder;
+// char firstGlobalPointerName;
+// char secondGlobalPointerName;
+// ...
+//}
+//
+// offsetof (CDACGlobalPointerIndex, NAME) returns the index of the global
+struct CDacGlobalPointerIndex
+{
+#define DECL_LEN(membername) char membername;
+#define CDAC_GLOBALS_BEGIN() DECL_LEN(cdac_global_pointer_index_start_placeholder__)
+#define CDAC_GLOBAL_POINTER(name,value) DECL_LEN(CONCAT(cdac_global_pointer_index__, name))
+#include "datadescriptor.h"
+#undef DECL_LEN
+};
+
+#define GET_GLOBAL_POINTER_INDEX(name) offsetof(struct CDacGlobalPointerIndex, CONCAT(cdac_global_pointer_index__, name))
+
+struct BinaryBlobDataDescriptor
+{
+ // The blob begins with a directory that gives the relative offsets of the `Baseline`, `Types`,
+ // `FieldsPool`, `GlobalLiteralValues`, `GlobalPointerValues` and `Names` fields of the blob.
+ // The number of elements of each of the arrays is next. This is followed by the sizes of the
+ // spec structs. Since `BinaryBlobDataDescriptor` is created via macros, we want to embed the
+ // `offsetof` and `sizeof` of the components of the blob into the blob itself without having to
+ // account for any padding that the C/C++ compiler may introduce to enforce alignment.
+ // Additionally the `Directory` tries to follow a common C/C++ alignment rule (we don't want
+ // padding introduced in the directory itself): N-byte members are aligned to start on N-byte
+ // boundaries.
+ struct Directory {
+ uint32_t FlagsAndBaselineStart;
+ uint32_t TypesStart;
+
+ uint32_t FieldsPoolStart;
+ uint32_t GlobalLiteralValuesStart;
+
+ uint32_t GlobalPointersStart;
+ uint32_t NamesPoolStart;
+
+ uint32_t TypeCount;
+ uint32_t FieldsPoolCount;
+
+ uint32_t GlobalLiteralValuesCount;
+ uint32_t GlobalPointerValuesCount;
+
+ uint32_t NamesPoolCount;
+
+ uint8_t TypeSpecSize;
+ uint8_t FieldSpecSize;
+ uint8_t GlobalLiteralSpecSize;
+ uint8_t GlobalPointerSpecSize;
+ } Directory;
+ uint32_t PlatformFlags;
+ uint32_t BaselineName;
+ struct TypeSpec Types[CDacBlobTypesCount];
+ struct FieldSpec FieldsPool[CDacBlobFieldsPoolCount];
+ struct GlobalLiteralSpec GlobalLiteralValues[CDacBlobGlobalLiteralsCount];
+ struct GlobalPointerSpec GlobalPointerValues[CDacBlobGlobalPointersCount];
+ uint8_t NamesPool[sizeof(struct CDacStringPoolSizes)];
+ uint8_t EndMagic[4];
+};
+
+struct MagicAndBlob {
+ uint64_t magic;
+ struct BinaryBlobDataDescriptor Blob;
+};
+
+// we only support 32-bit and 64-bit right now
+static_assert_no_msg(sizeof(void*) == 4 || sizeof(void*) == 8);
+
+// C-style designated initializers are a C++20 feature. Have to use plain old aggregate initialization instead.
+
+DLLEXPORT
+struct MagicAndBlob BlobDataDescriptor = {
+ /*.magic = */ 0x00424F4C42434144ull,// "DACBLOB",
+ /*.Blob =*/ {
+ /*.Directory =*/ {
+ /* .FlagsAndBaselineStart = */ offsetof(struct BinaryBlobDataDescriptor, PlatformFlags),
+ /* .TypesStart = */ offsetof(struct BinaryBlobDataDescriptor, Types),
+ /* .FieldsPoolStart = */ offsetof(struct BinaryBlobDataDescriptor, FieldsPool),
+ /* .GlobalLiteralValuesStart = */ offsetof(struct BinaryBlobDataDescriptor, GlobalLiteralValues),
+ /* .GlobalPointersStart = */ offsetof(struct BinaryBlobDataDescriptor, GlobalPointerValues),
+ /* .NamesPoolStart = */ offsetof(struct BinaryBlobDataDescriptor, NamesPool),
+ /* .TypeCount = */ CDacBlobTypesCount,
+ /* .FieldsPoolCount = */ CDacBlobFieldsPoolCount,
+ /* .GlobalLiteralValuesCount = */ CDacBlobGlobalLiteralsCount,
+ /* .GlobalPointerValuesCount = */ CDacBlobGlobalPointersCount,
+ /* .NamesPoolCount = */ sizeof(struct CDacStringPoolSizes),
+ /* .TypeSpecSize = */ sizeof(struct TypeSpec),
+ /* .FieldSpecSize = */ sizeof(struct FieldSpec),
+ /* .GlobalLiteralSpecSize = */ sizeof(struct GlobalLiteralSpec),
+ /* .GlobalPointerSpecSize = */ sizeof(struct GlobalPointerSpec),
+ },
+ /* .PlatformFlags = */ (sizeof(void*) == 4 ? 0x02 : 0) | 0x01,
+ /* .BaselineName = */ offsetof(struct CDacStringPoolSizes, cdac_string_pool_baseline_),
+
+ /* .Types = */ {
+#define CDAC_TYPE_BEGIN(name) { \
+ /* .Name = */ GET_TYPE_NAME(name), \
+ /* .Fields = */ GET_TYPE_FIELDS(name),
+#define CDAC_TYPE_INDETERMINATE(name) /*.Size = */ 0,
+#define CDAC_TYPE_SIZE(size) /* .Size = */ size,
+#define CDAC_TYPE_END(name) },
+#include "datadescriptor.h"
+ },
+
+ /* .FieldsPool = */ {
+#define CDAC_TYPES_BEGIN() {0,},
+#define CDAC_TYPE_FIELD(tyname,membertyname,membername,offset) { \
+ /* .Name = */ GET_FIELD_NAME(tyname,membername), \
+ /* .TypeName = */ GET_FIELDTYPE_NAME(tyname,membername), \
+ /* .FieldOffset = */ offset, \
+},
+#define CDAC_TYPE_END(name) { 0, },
+#include "datadescriptor.h"
+ },
+
+ /* .GlobalLiteralValues = */ {
+#define CDAC_GLOBAL(name,tyname,value) { /*.Name = */ GET_GLOBAL_NAME(name), /* .TypeName = */ GET_GLOBALTYPE_NAME(name), /* .Value = */ value },
+#include "datadescriptor.h"
+ },
+
+ /* .GlobalPointerValues = */ {
+#define CDAC_GLOBAL_POINTER(name,value) { /* .Name = */ GET_GLOBAL_NAME(name), /* .PointerDataIndex = */ GET_GLOBAL_POINTER_INDEX(name) },
+#include "datadescriptor.h"
+ },
+
+ /* .NamesPool = */ ("\0" // starts with a nul
+#define CDAC_BASELINE(name) name "\0"
+#define CDAC_TYPE_BEGIN(name) #name "\0"
+#define CDAC_TYPE_FIELD(tyname,membertyname,membername,offset) #membername "\0" #membertyname "\0"
+#define CDAC_GLOBAL_POINTER(name,value) #name "\0"
+#define CDAC_GLOBAL(name,tyname,value) #name "\0" #tyname "\0"
+#include "datadescriptor.h"
+ ),
+
+ /* .EndMagic = */ { 0x01, 0x02, 0x03, 0x04 },
+ }
+};
+
+// end blob definition
+
+} // extern "C"
diff --git a/src/coreclr/debug/runtimeinfo/datadescriptor.h b/src/coreclr/debug/runtimeinfo/datadescriptor.h
new file mode 100644
index 00000000000000..b5ab51774e1215
--- /dev/null
+++ b/src/coreclr/debug/runtimeinfo/datadescriptor.h
@@ -0,0 +1,139 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+//
+// No include guards. This file is included multiple times.
+
+// The format is:
+// CDAC_BASELINE("string") baseline data contract that the runtime should follow. "empty" is reasonable
+// CDAC_TYPES_BEGIN()
+// ... <types> ...
+// CDAC_TYPES_END()
+// CDAC_GLOBALS_BEGIN()
+// ... <globals> ...
+// CDAC_GLOBALS_END()
+//
+// In <types> the format is:
+// CDAC_TYPE_BEGIN(cdacTypeIdentifier) // defines a new data descriptor named cdacTypeIdentifier
+//
+// CDAC_TYPE_SIZE(k) -or- CDAC_TYPE_INDETERMINATE(cdacTypeIdentifier) specifies that the type has
+// size k (bytes - usually sizeof(SomeNativeType)) or specify that the type's size is not provided
+// It is important that CDAC_TYPE_SIZE or CDAC_TYPE_INDETERMINATE immediately follows
+// CDAC_TYPE_BEGIN
+//
+// CDAC_TYPE_FIELD(cdacTypeIdentifier, cdacFieldTypeIdentifier, cdacFieldName, k) specifies the
+// field of "cdacTypeIdentifier" that has name cdacFieldName and has the type
+// "cdacFieldtypeIdentifier" located at offset k in the type layout. k is usually
+// offsetof(SomeClass, m_FieldName) if the field is public
+//
+// if the field is private, the convention is that SomeClass declares a friend struct
+// cdac_offsets<T> and provides a specialization of cdac_offsets<SomeClass> with a public constexpr
+// size_t member that holds the offset:
+//
+// class MyClass {
+// private:
+// void* m_myField;
+// friend template cdac_offsets<T>;
+// };
+// template<> struct cdac_offsets<MyClass> {
+// static constexpr size_t MyField = offsetof(MyClass, m_myField);
+// };
+//
+// then the field layout can be specified as
+// CDAC_TYPE_FIELD(MyClassLayout, pointer, MyField, cdac_offsets<MyClass>::MyField)
+// There can be zero or more CDAC_TYPE_FIELD entries per type layout
+//
+// CDAC_TYPE_END(cdacTypeIdentifier) specifies the end of the type layout for cdacTypeIdentifier
+//
+// In <globals> the format is:
+//
+// CDAC_GLOBAL(cdacGlobalName, cdacTypeIdentifier, value)
+// or
+// CDAC_GLOBAL_POINTER(cdacGlobalName, address)
+//
+// Zero or more globals can be defined
+//
+// if a global is given with CDAC_GLOBAL(), `value` should be a constexpr uint64_t (or convertible
+// to uint64_t) for example, it can be a literal constant or a preprocessor definition
+//
+// if a global is a CDAC_GLOBAL_POINTER(), address should be a constexpr pointer or a constexpr
+// uintptr_t
+//
+//
+//
+// This file is compiled using the target architecture. Preprocessor defines for the target
+// platform will be available. It is ok to use `#ifdef`.
+
+#ifndef CDAC_BASELINE
+#define CDAC_BASELINE(identifier)
+#endif
+#ifndef CDAC_TYPES_BEGIN
+#define CDAC_TYPES_BEGIN()
+#endif
+#ifndef CDAC_TYPE_BEGIN
+#define CDAC_TYPE_BEGIN(tyname)
+#endif
+#ifndef CDAC_TYPE_SIZE
+#define CDAC_TYPE_SIZE(k)
+#endif
+#ifndef CDAC_TYPE_INDETERMINATE
+#define CDAC_TYPE_INDETERMINATE(tyname)
+#endif
+#ifndef CDAC_TYPE_FIELD
+#define CDAC_TYPE_FIELD(tyname,fieldtyname,fieldname,off)
+#endif
+#ifndef CDAC_TYPE_END
+#define CDAC_TYPE_END(tyname)
+#endif
+#ifndef CDAC_TYPES_END
+#define CDAC_TYPES_END()
+#endif
+#ifndef CDAC_GLOBALS_BEGIN
+#define CDAC_GLOBALS_BEGIN()
+#endif
+#ifndef CDAC_GLOBAL
+#define CDAC_GLOBAL(globalname,tyname,val)
+#endif
+#ifndef CDAC_GLOBAL_POINTER
+#define CDAC_GLOBAL_POINTER(globalname,addr)
+#endif
+#ifndef CDAC_GLOBALS_END
+#define CDAC_GLOBALS_END()
+#endif
+
+CDAC_BASELINE("empty")
+CDAC_TYPES_BEGIN()
+
+CDAC_TYPE_BEGIN(ManagedThread)
+CDAC_TYPE_INDETERMINATE(ManagedThread)
+CDAC_TYPE_FIELD(ManagedThread, GCHandle, GCHandle, cdac_offsets::ExposedObject)
+CDAC_TYPE_FIELD(ManagedThread, pointer, LinkNext, cdac_offsets::Link)
+CDAC_TYPE_END(ManagedThread)
+
+CDAC_TYPE_BEGIN(GCHandle)
+CDAC_TYPE_SIZE(sizeof(OBJECTHANDLE))
+CDAC_TYPE_END(GCHandle)
+
+CDAC_TYPES_END()
+
+CDAC_GLOBALS_BEGIN()
+CDAC_GLOBAL_POINTER(ManagedThreadStore, &ThreadStore::s_pThreadStore)
+#if FEATURE_EH_FUNCLETS
+CDAC_GLOBAL(FeatureEHFunclets, uint8, 1)
+#else
+CDAC_GLOBAL(FeatureEHFunclets, uint8, 0)
+#endif
+CDAC_GLOBAL(SOSBreakingChangeVersion, uint8, SOS_BREAKING_CHANGE_VERSION)
+CDAC_GLOBALS_END()
+
+#undef CDAC_BASELINE
+#undef CDAC_TYPES_BEGIN
+#undef CDAC_TYPE_BEGIN
+#undef CDAC_TYPE_INDETERMINATE
+#undef CDAC_TYPE_SIZE
+#undef CDAC_TYPE_FIELD
+#undef CDAC_TYPE_END
+#undef CDAC_TYPES_END
+#undef CDAC_GLOBALS_BEGIN
+#undef CDAC_GLOBAL
+#undef CDAC_GLOBAL_POINTER
+#undef CDAC_GLOBALS_END
diff --git a/src/coreclr/debug/shared/dbgtransportsession.cpp b/src/coreclr/debug/shared/dbgtransportsession.cpp
index 3bebb8282aed7d..f03a62c62ea994 100644
--- a/src/coreclr/debug/shared/dbgtransportsession.cpp
+++ b/src/coreclr/debug/shared/dbgtransportsession.cpp
@@ -2203,6 +2203,7 @@ DWORD DbgTransportSession::GetEventSize(DebuggerIPCEvent *pEvent)
case DB_IPCE_BEFORE_GARBAGE_COLLECTION:
case DB_IPCE_AFTER_GARBAGE_COLLECTION:
case DB_IPCE_DISABLE_OPTS_RESULT:
+ case DB_IPCE_CATCH_HANDLER_FOUND_RESULT:
cbAdditionalSize = 0;
break;
@@ -2501,6 +2502,9 @@ DWORD DbgTransportSession::GetEventSize(DebuggerIPCEvent *pEvent)
case DB_IPCE_DISABLE_OPTS:
cbAdditionalSize = sizeof(pEvent->DisableOptData);
break;
+ case DB_IPCE_FORCE_CATCH_HANDLER_FOUND:
+ cbAdditionalSize = sizeof(pEvent->ForceCatchHandlerFoundData);
+ break;
default:
STRESS_LOG1(LF_CORDB, LL_INFO1000, "Unknown debugger event type: 0x%x\n", (pEvent->type & DB_IPCE_TYPE_MASK));
diff --git a/src/coreclr/dlls/mscoree/coreclr/CMakeLists.txt b/src/coreclr/dlls/mscoree/coreclr/CMakeLists.txt
index c600af1fb6aada..7ba58d0297f9a9 100644
--- a/src/coreclr/dlls/mscoree/coreclr/CMakeLists.txt
+++ b/src/coreclr/dlls/mscoree/coreclr/CMakeLists.txt
@@ -109,6 +109,7 @@ set(CORECLR_LIBRARIES
interop
coreclrminipal
gc_pal
+ cdac_contract_descriptor
)
if(CLR_CMAKE_TARGET_ARCH_AMD64)
diff --git a/src/coreclr/dlls/mscoree/mscorwks_ntdef.src b/src/coreclr/dlls/mscoree/mscorwks_ntdef.src
index 0ac421b63e0718..a2076bd62433c9 100644
--- a/src/coreclr/dlls/mscoree/mscorwks_ntdef.src
+++ b/src/coreclr/dlls/mscoree/mscorwks_ntdef.src
@@ -28,3 +28,6 @@ EXPORTS
; Used by profilers
MetaDataGetDispenser
+
+ ; cDAC contract descriptor
+ DotNetRuntimeContractDescriptor
diff --git a/src/coreclr/dlls/mscoree/mscorwks_unixexports.src b/src/coreclr/dlls/mscoree/mscorwks_unixexports.src
index a35a59c095604a..3eacb7fa484856 100644
--- a/src/coreclr/dlls/mscoree/mscorwks_unixexports.src
+++ b/src/coreclr/dlls/mscoree/mscorwks_unixexports.src
@@ -14,3 +14,6 @@ g_dacTable
; Used by profilers
MetaDataGetDispenser
+
+; cDAC contract descriptor
+DotNetRuntimeContractDescriptor
diff --git a/src/coreclr/gc/gc.cpp b/src/coreclr/gc/gc.cpp
index e43047cf6e113a..9b6f5515e37a5e 100644
--- a/src/coreclr/gc/gc.cpp
+++ b/src/coreclr/gc/gc.cpp
@@ -388,7 +388,7 @@ int relative_index_power2_free_space (size_t power2)
#ifdef BACKGROUND_GC
uint32_t bgc_alloc_spin_count = 140;
-uint32_t bgc_alloc_spin_count_loh = 16;
+uint32_t bgc_alloc_spin_count_uoh = 16;
uint32_t bgc_alloc_spin = 2;
inline
@@ -2657,13 +2657,10 @@ size_t gc_heap::end_loh_size = 0;
size_t gc_heap::bgc_begin_poh_size = 0;
size_t gc_heap::end_poh_size = 0;
+size_t gc_heap::uoh_a_no_bgc[uoh_generation_count] = {};
+size_t gc_heap::uoh_a_bgc_marking[uoh_generation_count] = {};
+size_t gc_heap::uoh_a_bgc_planning[uoh_generation_count] = {};
#ifdef BGC_SERVO_TUNING
-uint64_t gc_heap::loh_a_no_bgc = 0;
-
-uint64_t gc_heap::loh_a_bgc_marking = 0;
-
-uint64_t gc_heap::loh_a_bgc_planning = 0;
-
size_t gc_heap::bgc_maxgen_end_fl_size = 0;
#endif //BGC_SERVO_TUNING
@@ -2794,9 +2791,9 @@ FinalizerWorkItem* gc_heap::finalizer_work;
BOOL gc_heap::proceed_with_gc_p = FALSE;
GCSpinLock gc_heap::gc_lock;
-#ifdef BGC_SERVO_TUNING
-uint64_t gc_heap::total_loh_a_last_bgc = 0;
-#endif //BGC_SERVO_TUNING
+#ifdef BACKGROUND_GC
+uint64_t gc_heap::total_uoh_a_last_bgc = 0;
+#endif //BACKGROUND_GC
#ifdef USE_REGIONS
region_free_list gc_heap::global_regions_to_decommit[count_free_region_kinds];
@@ -15039,10 +15036,13 @@ gc_heap::init_gc_heap (int h_number)
make_mark_stack(arr);
#ifdef BACKGROUND_GC
+ for (int i = uoh_start_generation; i < total_generation_count; i++)
+ {
+ uoh_a_no_bgc[i - uoh_start_generation] = 0;
+ uoh_a_bgc_marking[i - uoh_start_generation] = 0;
+ uoh_a_bgc_planning[i - uoh_start_generation] = 0;
+ }
#ifdef BGC_SERVO_TUNING
- loh_a_no_bgc = 0;
- loh_a_bgc_marking = 0;
- loh_a_bgc_planning = 0;
bgc_maxgen_end_fl_size = 0;
#endif //BGC_SERVO_TUNING
freeable_soh_segment = 0;
@@ -18424,6 +18424,29 @@ bool gc_heap::should_retry_other_heap (int gen_number, size_t size)
}
}
+void gc_heap::bgc_record_uoh_allocation(int gen_number, size_t size)
+{
+ assert((gen_number >= uoh_start_generation) && (gen_number < total_generation_count));
+
+ if (gc_heap::background_running_p())
+ {
+ background_uoh_alloc_count++;
+
+ if (current_c_gc_state == c_gc_state_planning)
+ {
+ uoh_a_bgc_planning[gen_number - uoh_start_generation] += size;
+ }
+ else
+ {
+ uoh_a_bgc_marking[gen_number - uoh_start_generation] += size;
+ }
+ }
+ else
+ {
+ uoh_a_no_bgc[gen_number - uoh_start_generation] += size;
+ }
+}
+
allocation_state gc_heap::allocate_uoh (int gen_number,
size_t size,
alloc_context* acontext,
@@ -18446,26 +18469,12 @@ allocation_state gc_heap::allocate_uoh (int gen_number,
#endif //RECORD_LOH_STATE
#ifdef BACKGROUND_GC
+ bgc_record_uoh_allocation(gen_number, size);
+
if (gc_heap::background_running_p())
{
-#ifdef BGC_SERVO_TUNING
- bool planning_p = (current_c_gc_state == c_gc_state_planning);
-#endif //BGC_SERVO_TUNING
-
- background_uoh_alloc_count++;
- //if ((background_loh_alloc_count % bgc_alloc_spin_count_loh) == 0)
+ //if ((background_uoh_alloc_count % bgc_alloc_spin_count_uoh) == 0)
{
-#ifdef BGC_SERVO_TUNING
- if (planning_p)
- {
- loh_a_bgc_planning += size;
- }
- else
- {
- loh_a_bgc_marking += size;
- }
-#endif //BGC_SERVO_TUNING
-
int spin_for_allocation = (gen_number == loh_generation) ?
bgc_loh_allocate_spin() :
bgc_poh_allocate_spin();
@@ -18491,12 +18500,6 @@ allocation_state gc_heap::allocate_uoh (int gen_number,
}
}
}
-#ifdef BGC_SERVO_TUNING
- else
- {
- loh_a_no_bgc += size;
- }
-#endif //BGC_SERVO_TUNING
#endif //BACKGROUND_GC
gc_reason gr = reason_oos_loh;
@@ -29966,7 +29969,7 @@ void gc_heap::mark_phase (int condemned_gen_number)
#endif //MULTIPLE_HEAPS
{
#ifdef FEATURE_EVENT_TRACE
- record_mark_time (gc_time_info[time_plan - 1], current_mark_time, last_mark_time);
+ record_mark_time (gc_time_info[time_mark_long_weak], current_mark_time, last_mark_time);
gc_time_info[time_plan] = last_mark_time;
#endif //FEATURE_EVENT_TRACE
@@ -33964,26 +33967,12 @@ void gc_heap::plan_phase (int condemned_gen_number)
if (gc_t_join.joined())
#endif //MULTIPLE_HEAPS
{
-#ifdef FEATURE_EVENT_TRACE
- if (informational_event_enabled_p)
- {
- uint64_t current_time = GetHighPrecisionTimeStamp();
- gc_time_info[time_compact] = current_time - gc_time_info[time_compact];
- }
-#endif //FEATURE_EVENT_TRACE
-
#ifdef MULTIPLE_HEAPS
for (int i = 0; i < n_heaps; i++)
{
-#ifdef USE_REGIONS
- g_heaps [i]->rearrange_uoh_segments();
-#endif //USE_REGIONS
g_heaps [i]->rearrange_heap_segments (TRUE);
}
#else //MULTIPLE_HEAPS
-#ifdef USE_REGIONS
- rearrange_uoh_segments();
-#endif //USE_REGIONS
rearrange_heap_segments (TRUE);
#endif //MULTIPLE_HEAPS
@@ -34018,7 +34007,7 @@ void gc_heap::plan_phase (int condemned_gen_number)
#endif //MULTIPLE_HEAPS
#ifdef FEATURE_EVENT_TRACE
- if (informational_event_enabled_p && (condemned_gen_number < (max_generation -1)))
+ if (informational_event_enabled_p)
{
uint64_t current_time = GetHighPrecisionTimeStamp();
gc_time_info[time_compact] = current_time - gc_time_info[time_compact];
@@ -40638,7 +40627,7 @@ void gc_heap::bgc_tuning::record_and_adjust_bgc_end()
calculate_tuning (max_generation, true);
- if (total_loh_a_last_bgc > 0)
+ if (total_uoh_a_last_bgc > 0)
{
calculate_tuning (loh_generation, true);
}
@@ -45835,9 +45824,6 @@ void gc_heap::background_sweep()
concurrent_print_time_delta ("Sw");
dprintf (2, ("---- (GC%zu)Background Sweep Phase ----", VolatileLoad(&settings.gc_index)));
- //block concurrent allocation for large objects
- dprintf (3, ("lh state: planning"));
-
for (int i = 0; i <= max_generation; i++)
{
generation* gen_to_reset = generation_of (i);
@@ -45886,6 +45872,9 @@ void gc_heap::background_sweep()
sweep_ro_segments();
#endif //FEATURE_BASICFREEZE
+ dprintf (3, ("lh state: planning"));
+
+ // Multiple threads may reach here. This conditional partially avoids multiple volatile writes.
if (current_c_gc_state != c_gc_state_planning)
{
current_c_gc_state = c_gc_state_planning;
@@ -45916,9 +45905,7 @@ void gc_heap::background_sweep()
if (heap_number == 0)
{
-#ifdef BGC_SERVO_TUNING
- get_and_reset_loh_alloc_info();
-#endif //BGC_SERVO_TUNING
+ get_and_reset_uoh_alloc_info();
uint64_t suspended_end_ts = GetHighPrecisionTimeStamp();
last_bgc_info[last_bgc_info_index].pause_durations[1] = (size_t)(suspended_end_ts - suspended_start_time);
total_suspended_time += last_bgc_info[last_bgc_info_index].pause_durations[1];
@@ -46247,6 +46234,7 @@ void gc_heap::background_sweep()
concurrent_print_time_delta ("Swe SOH");
FIRE_EVENT(BGC1stSweepEnd, 0);
+ //block concurrent allocation for UOH objects
enter_spin_lock (&more_space_lock_uoh);
add_saved_spinlock_info (true, me_acquire, mt_bgc_uoh_sweep, msl_entered);
@@ -46302,6 +46290,15 @@ void gc_heap::background_sweep()
// be accurate.
compute_new_dynamic_data (max_generation);
+ // We also need to adjust size_before for UOH allocations that occurred during sweeping.
+ gc_history_per_heap* current_gc_data_per_heap = get_gc_data_per_heap();
+ for (int i = uoh_start_generation; i < total_generation_count; i++)
+ {
+ assert(uoh_a_bgc_marking[i - uoh_start_generation] == 0);
+ assert(uoh_a_no_bgc[i - uoh_start_generation] == 0);
+ current_gc_data_per_heap->gen_data[i].size_before += uoh_a_bgc_planning[i - uoh_start_generation];
+ }
+
#ifdef DOUBLY_LINKED_FL
current_bgc_state = bgc_not_in_process;
@@ -50271,17 +50268,15 @@ void gc_heap::check_and_adjust_bgc_tuning (int gen_number, size_t physical_size,
}
}
}
+#endif //BGC_SERVO_TUNING
-void gc_heap::get_and_reset_loh_alloc_info()
+void gc_heap::get_and_reset_uoh_alloc_info()
{
- if (!bgc_tuning::enable_fl_tuning)
- return;
+ total_uoh_a_last_bgc = 0;
- total_loh_a_last_bgc = 0;
-
- uint64_t total_loh_a_no_bgc = 0;
- uint64_t total_loh_a_bgc_marking = 0;
- uint64_t total_loh_a_bgc_planning = 0;
+ uint64_t total_uoh_a_no_bgc = 0;
+ uint64_t total_uoh_a_bgc_marking = 0;
+ uint64_t total_uoh_a_bgc_planning = 0;
#ifdef MULTIPLE_HEAPS
for (int i = 0; i < gc_heap::n_heaps; i++)
{
@@ -50290,21 +50285,32 @@ void gc_heap::get_and_reset_loh_alloc_info()
{
gc_heap* hp = pGenGCHeap;
#endif //MULTIPLE_HEAPS
- total_loh_a_no_bgc += hp->loh_a_no_bgc;
- hp->loh_a_no_bgc = 0;
- total_loh_a_bgc_marking += hp->loh_a_bgc_marking;
- hp->loh_a_bgc_marking = 0;
- total_loh_a_bgc_planning += hp->loh_a_bgc_planning;
- hp->loh_a_bgc_planning = 0;
+
+ // We need to adjust size_before for UOH allocations that occurred during marking
+ // before we lose the values here.
+ gc_history_per_heap* current_gc_data_per_heap = hp->get_gc_data_per_heap();
+ // loh/poh_a_bgc_planning should be the same as they were when init_records set size_before.
+ for (int i = uoh_start_generation; i < total_generation_count; i++)
+ {
+ current_gc_data_per_heap->gen_data[i].size_before += hp->uoh_a_bgc_marking[i - uoh_start_generation];
+
+ total_uoh_a_no_bgc += hp->uoh_a_no_bgc[i - uoh_start_generation];
+ hp->uoh_a_no_bgc[i - uoh_start_generation] = 0;
+
+ total_uoh_a_bgc_marking += hp->uoh_a_bgc_marking[i - uoh_start_generation];
+ hp->uoh_a_bgc_marking[i - uoh_start_generation] = 0;
+
+ total_uoh_a_bgc_planning += hp->uoh_a_bgc_planning[i - uoh_start_generation];
+ hp->uoh_a_bgc_planning[i - uoh_start_generation] = 0;
+ }
}
dprintf (2, ("LOH alloc: outside bgc: %zd; bm: %zd; bp: %zd",
- total_loh_a_no_bgc,
- total_loh_a_bgc_marking,
- total_loh_a_bgc_planning));
+ total_uoh_a_no_bgc,
+ total_uoh_a_bgc_marking,
+ total_uoh_a_bgc_planning));
- total_loh_a_last_bgc = total_loh_a_no_bgc + total_loh_a_bgc_marking + total_loh_a_bgc_planning;
+ total_uoh_a_last_bgc = total_uoh_a_no_bgc + total_uoh_a_bgc_marking + total_uoh_a_bgc_planning;
}
-#endif //BGC_SERVO_TUNING
bool gc_heap::is_pm_ratio_exceeded()
{
diff --git a/src/coreclr/gc/gc.h b/src/coreclr/gc/gc.h
index a1093b5e76ceca..e80853489a2410 100644
--- a/src/coreclr/gc/gc.h
+++ b/src/coreclr/gc/gc.h
@@ -124,7 +124,10 @@ enum gc_generation_num
ephemeral_generation_count = max_generation,
// number of all generations
- total_generation_count = poh_generation + 1
+ total_generation_count = poh_generation + 1,
+
+ // number of uoh generations
+ uoh_generation_count = total_generation_count - uoh_start_generation
};
#ifdef GC_CONFIG_DRIVEN
diff --git a/src/coreclr/gc/gcpriv.h b/src/coreclr/gc/gcpriv.h
index 6db2e06d04c004..757f816c639fd3 100644
--- a/src/coreclr/gc/gcpriv.h
+++ b/src/coreclr/gc/gcpriv.h
@@ -140,10 +140,10 @@ inline void FATAL_GC_ERROR()
//
// This means any empty regions can be freely used for any generation. For
// Server GC we will balance regions between heaps.
-// For now disable regions for StandAlone GC, NativeAOT and MacOS builds
+// For now disable regions for standalone GC and macOS builds
#if defined (HOST_64BIT) && !defined (BUILD_AS_STANDALONE) && !defined(__APPLE__)
#define USE_REGIONS
-#endif //HOST_64BIT && BUILD_AS_STANDALONE
+#endif //HOST_64BIT && BUILD_AS_STANDALONE && !__APPLE__
//#define SPINLOCK_HISTORY
//#define RECORD_LOH_STATE
@@ -1752,6 +1752,8 @@ class gc_heap
PER_HEAP_ISOLATED_METHOD void add_to_history();
+ PER_HEAP_ISOLATED_METHOD void get_and_reset_uoh_alloc_info();
+
#ifdef BGC_SERVO_TUNING
// Currently BGC servo tuning is an experimental feature.
class bgc_tuning
@@ -1997,7 +1999,6 @@ class gc_heap
};
PER_HEAP_ISOLATED_METHOD void check_and_adjust_bgc_tuning (int gen_number, size_t physical_size, ptrdiff_t virtual_fl_size);
- PER_HEAP_ISOLATED_METHOD void get_and_reset_loh_alloc_info();
#endif //BGC_SERVO_TUNING
#ifndef USE_REGIONS
@@ -2230,6 +2231,8 @@ class gc_heap
PER_HEAP_METHOD BOOL bgc_loh_allocate_spin();
PER_HEAP_METHOD BOOL bgc_poh_allocate_spin();
+
+ PER_HEAP_METHOD void bgc_record_uoh_allocation(int gen_number, size_t size);
#endif //BACKGROUND_GC
PER_HEAP_METHOD void add_saved_spinlock_info (
@@ -3436,6 +3439,11 @@ class gc_heap
PER_HEAP_FIELD_SINGLE_GC uint8_t* next_sweep_obj;
PER_HEAP_FIELD_SINGLE_GC uint8_t* current_sweep_pos;
+
+ PER_HEAP_FIELD_SINGLE_GC size_t uoh_a_no_bgc[uoh_generation_count];
+ PER_HEAP_FIELD_SINGLE_GC size_t uoh_a_bgc_marking[uoh_generation_count];
+ PER_HEAP_FIELD_SINGLE_GC size_t uoh_a_bgc_planning[uoh_generation_count];
+
#ifdef DOUBLY_LINKED_FL
PER_HEAP_FIELD_SINGLE_GC heap_segment* current_sweep_seg;
#endif //DOUBLY_LINKED_FL
@@ -3461,9 +3469,6 @@ class gc_heap
#endif //SNOOP_STATS
#ifdef BGC_SERVO_TUNING
- PER_HEAP_FIELD_SINGLE_GC uint64_t loh_a_no_bgc;
- PER_HEAP_FIELD_SINGLE_GC uint64_t loh_a_bgc_marking;
- PER_HEAP_FIELD_SINGLE_GC uint64_t loh_a_bgc_planning;
PER_HEAP_FIELD_SINGLE_GC size_t bgc_maxgen_end_fl_size;
#endif //BGC_SERVO_TUNING
#endif //BACKGROUND_GC
@@ -4097,11 +4102,9 @@ class gc_heap
PER_HEAP_ISOLATED_FIELD_SINGLE_GC GCEvent bgc_start_event;
-#ifdef BGC_SERVO_TUNING
// Total allocated last BGC's plan + between last and this bgc +
// this bgc's mark
- PER_HEAP_ISOLATED_FIELD_SINGLE_GC uint64_t total_loh_a_last_bgc;
-#endif //BGC_SERVO_TUNING
+ PER_HEAP_ISOLATED_FIELD_SINGLE_GC uint64_t total_uoh_a_last_bgc;
#endif //BACKGROUND_GC
#ifdef USE_REGIONS
diff --git a/src/coreclr/ildasm/dis.cpp b/src/coreclr/ildasm/dis.cpp
index 2ad1ecd2d200a1..aae2e7ab472773 100644
--- a/src/coreclr/ildasm/dis.cpp
+++ b/src/coreclr/ildasm/dis.cpp
@@ -727,8 +727,7 @@ void OpenScope(ISymUnmanagedScope *pIScope,
char* DumpUnicodeString(void* GUICookie,
__inout __nullterminated char* szString,
_In_reads_(cbString) WCHAR* pszString,
- ULONG cbString,
- bool SwapString )
+ ULONG cbString)
{
unsigned i,L;
char* szStr=NULL, *szRet = NULL;
@@ -750,8 +749,7 @@ char* DumpUnicodeString(void* GUICookie,
#endif
#if BIGENDIAN
- if (SwapString)
- SwapStringLength(pszString, cbString);
+ SwapStringLength(pszString, cbString);
#endif
// first, check for embedded zeros:
@@ -782,7 +780,7 @@ char* DumpUnicodeString(void* GUICookie,
strcat_s(szString,SZSTRING_SIZE," (");
#if BIGENDIAN
- SwapStringLength(pszString, cbString);
+ SwapStringLength(pszString, cbString);
#endif
DumpByteArray(szString,(BYTE*)pszString,cbString*sizeof(WCHAR),GUICookie);
szRet = &szString[strlen(szString)];
@@ -2546,7 +2544,7 @@ void PrettyPrintToken(__inout __nullterminated char* szString, mdToken tk, IMDIn
}
if (pszString != NULL)
{
- DumpUnicodeString(GUICookie,szString,(WCHAR *)pszString,cbString, true);
+ DumpUnicodeString(GUICookie,szString,(WCHAR *)pszString,cbString);
}
else
{
diff --git a/src/coreclr/ildasm/dis.h b/src/coreclr/ildasm/dis.h
index 6aef96aefcfaea..1b60192a60b82e 100644
--- a/src/coreclr/ildasm/dis.h
+++ b/src/coreclr/ildasm/dis.h
@@ -78,8 +78,7 @@ void DumpVtable(void* GUICookie);
char* DumpUnicodeString(void* GUICookie,
__inout __nullterminated char* szString,
_In_reads_(cbString) WCHAR* pszString,
- ULONG cbString,
- bool SwapString = false);
+ ULONG cbString);
void TokenSigInit(IMDInternalImport *pImport);
void TokenSigDelete();
diff --git a/src/coreclr/inc/cordebug.idl b/src/coreclr/inc/cordebug.idl
index 2e06651c9249aa..077d811cd45b9a 100644
--- a/src/coreclr/inc/cordebug.idl
+++ b/src/coreclr/inc/cordebug.idl
@@ -7781,6 +7781,17 @@ interface ICorDebugExceptionObjectValue : IUnknown
HRESULT EnumerateExceptionCallStack([out] ICorDebugExceptionObjectCallStackEnum** ppCallStackEnum);
};
+[
+ object,
+ local,
+ uuid(e3b2f332-cc46-4f1e-ab4e-5400e332195e),
+ pointer_default(unique)
+]
+interface ICorDebugExceptionObjectValue2 : IUnknown
+{
+ HRESULT ForceCatchHandlerFoundEvents([in] BOOL enableEvents);
+};
+
/* ------------------------------------------------------------------------- *
* Library definition
* ------------------------------------------------------------------------- */
diff --git a/src/coreclr/inc/corinfo.h b/src/coreclr/inc/corinfo.h
index ff82759f6aab64..2d526105164b6b 100644
--- a/src/coreclr/inc/corinfo.h
+++ b/src/coreclr/inc/corinfo.h
@@ -496,7 +496,7 @@ enum CorInfoHelpFunc
CORINFO_HELP_ASSIGN_REF_ENSURE_NONHEAP, // Do the store, and ensure that the target was not in the heap.
CORINFO_HELP_ASSIGN_BYREF,
- CORINFO_HELP_ASSIGN_STRUCT,
+ CORINFO_HELP_BULK_WRITEBARRIER,
/* Accessing fields */
diff --git a/src/coreclr/inc/corjit.h b/src/coreclr/inc/corjit.h
index 4c012a7b263bf7..18a5be23fdde2d 100644
--- a/src/coreclr/inc/corjit.h
+++ b/src/coreclr/inc/corjit.h
@@ -452,7 +452,8 @@ class ICorJitInfo : public ICorDynamicInfo
uint32_t * pCountSchemaItems, // OUT: pointer to the count of schema items in `pSchema` array.
uint8_t ** pInstrumentationData, // OUT: `*pInstrumentationData` is set to the address of the instrumentation data
// (pointer will not remain valid after jit completes).
- PgoSource * pPgoSource // OUT: value describing source of pgo data
+ PgoSource * pPgoSource, // OUT: value describing source of pgo data
+ bool * pDynamicPgo // OUT: dynamic PGO is enabled (valid even when return value is failure)
) = 0;
// Allocate a profile buffer for use in the current process
diff --git a/src/coreclr/inc/icorjitinfoimpl_generated.h b/src/coreclr/inc/icorjitinfoimpl_generated.h
index 98b47dced2beb0..5572a044b9b0aa 100644
--- a/src/coreclr/inc/icorjitinfoimpl_generated.h
+++ b/src/coreclr/inc/icorjitinfoimpl_generated.h
@@ -711,7 +711,8 @@ JITINTERFACE_HRESULT getPgoInstrumentationResults(
ICorJitInfo::PgoInstrumentationSchema** pSchema,
uint32_t* pCountSchemaItems,
uint8_t** pInstrumentationData,
- ICorJitInfo::PgoSource* pgoSource) override;
+ ICorJitInfo::PgoSource* pPgoSource,
+ bool* pDynamicPgo) override;
JITINTERFACE_HRESULT allocPgoInstrumentationBySchema(
CORINFO_METHOD_HANDLE ftnHnd,
diff --git a/src/coreclr/inc/jiteeversionguid.h b/src/coreclr/inc/jiteeversionguid.h
index 743abdaff86aea..122906341d00b4 100644
--- a/src/coreclr/inc/jiteeversionguid.h
+++ b/src/coreclr/inc/jiteeversionguid.h
@@ -43,11 +43,11 @@ typedef const GUID *LPCGUID;
#define GUID_DEFINED
#endif // !GUID_DEFINED
-constexpr GUID JITEEVersionIdentifier = { /* 8f046bcb-ca5f-4692-9277-898b71cb7938 */
- 0x8f046bcb,
- 0xca5f,
- 0x4692,
- {0x92, 0x77, 0x89, 0x8b, 0x71, 0xcb, 0x79, 0x38}
+constexpr GUID JITEEVersionIdentifier = { /* bd8c41d4-8531-49c1-a600-0ae9bfe05de1 */
+ 0xbd8c41d4,
+ 0x8531,
+ 0x49c1,
+ {0xa6, 0x00, 0x0a, 0xe9, 0xbf, 0xe0, 0x5d, 0xe1}
};
//////////////////////////////////////////////////////////////////////////////////////////////////////////
diff --git a/src/coreclr/inc/jithelpers.h b/src/coreclr/inc/jithelpers.h
index f1711a9acfd9b2..2ee49994538592 100644
--- a/src/coreclr/inc/jithelpers.h
+++ b/src/coreclr/inc/jithelpers.h
@@ -153,8 +153,7 @@
JITHELPER(CORINFO_HELP_ASSIGN_REF_ENSURE_NONHEAP, JIT_WriteBarrierEnsureNonHeapTarget,CORINFO_HELP_SIG_REG_ONLY)
DYNAMICJITHELPER(CORINFO_HELP_ASSIGN_BYREF, JIT_ByRefWriteBarrier,CORINFO_HELP_SIG_NO_ALIGN_STUB)
-
- JITHELPER(CORINFO_HELP_ASSIGN_STRUCT, JIT_StructWriteBarrier,CORINFO_HELP_SIG_4_STACK)
+ DYNAMICJITHELPER(CORINFO_HELP_BULK_WRITEBARRIER, NULL, CORINFO_HELP_SIG_REG_ONLY)
// Accessing fields
JITHELPER(CORINFO_HELP_GETFIELD8, JIT_GetField8,CORINFO_HELP_SIG_REG_ONLY)
diff --git a/src/coreclr/inc/metadata.h b/src/coreclr/inc/metadata.h
index 766893bea17b8a..65ac907421c244 100644
--- a/src/coreclr/inc/metadata.h
+++ b/src/coreclr/inc/metadata.h
@@ -220,19 +220,6 @@ struct HENUMInternal
//*****************************************
typedef struct _MDDefaultValue
{
-#if BIGENDIAN
- _MDDefaultValue(void)
- {
- m_bType = ELEMENT_TYPE_END;
- }
- ~_MDDefaultValue(void)
- {
- if (m_bType == ELEMENT_TYPE_STRING)
- {
- delete[] m_wzValue;
- }
- }
-#endif
// type of default value
BYTE m_bType; // CorElementType for the default value
@@ -251,7 +238,7 @@ typedef struct _MDDefaultValue
ULONGLONG m_ullValue; // ELEMENT_TYPE_UI8
FLOAT m_fltValue; // ELEMENT_TYPE_R4
DOUBLE m_dblValue; // ELEMENT_TYPE_R8
- LPCWSTR m_wzValue; // ELEMENT_TYPE_STRING
+ LPCWSTR m_wzValue; // ELEMENT_TYPE_STRING - Little endian
IUnknown *m_unkValue; // ELEMENT_TYPE_CLASS
};
ULONG m_cbSize; // default value size (for blob)
diff --git a/src/coreclr/inc/new.hpp b/src/coreclr/inc/new.hpp
index 09eec5d1ffcf82..037ceb3c667c20 100644
--- a/src/coreclr/inc/new.hpp
+++ b/src/coreclr/inc/new.hpp
@@ -7,17 +7,9 @@
#ifndef __new__hpp
#define __new__hpp
-#if defined(_MSC_VER) && _MSC_VER < 1900
-#define NOEXCEPT
-#else
-#define NOEXCEPT noexcept
-#endif
-
-struct NoThrow { int x; };
-extern const NoThrow nothrow;
+#include <new>
-void * __cdecl operator new(size_t n, const NoThrow&) NOEXCEPT;
-void * __cdecl operator new[](size_t n, const NoThrow&) NOEXCEPT;
+using std::nothrow;
#ifdef _DEBUG
void DisableThrowCheck();
diff --git a/src/coreclr/inc/readytorun.h b/src/coreclr/inc/readytorun.h
index 88219146a123a4..1c3ce8237ef7fc 100644
--- a/src/coreclr/inc/readytorun.h
+++ b/src/coreclr/inc/readytorun.h
@@ -20,7 +20,7 @@
// If you update this, ensure you run `git grep MINIMUM_READYTORUN_MAJOR_VERSION`
// and handle pending work.
#define READYTORUN_MAJOR_VERSION 0x0009
-#define READYTORUN_MINOR_VERSION 0x0002
+#define READYTORUN_MINOR_VERSION 0x0003
#define MINIMUM_READYTORUN_MAJOR_VERSION 0x009
@@ -34,6 +34,7 @@
// R2R Version 9.0 adds support for the Vector512 type
// R2R Version 9.1 adds new helpers to allocate objects on frozen segments
// R2R Version 9.2 adds MemZero and NativeMemSet helpers
+// R2R Version 9.3 adds BulkWriteBarrier helper
struct READYTORUN_CORE_HEADER
@@ -321,6 +322,7 @@ enum ReadyToRunHelper
READYTORUN_HELPER_WriteBarrier = 0x30,
READYTORUN_HELPER_CheckedWriteBarrier = 0x31,
READYTORUN_HELPER_ByRefWriteBarrier = 0x32,
+ READYTORUN_HELPER_BulkWriteBarrier = 0x33,
// Array helpers
READYTORUN_HELPER_Stelem_Ref = 0x38,
diff --git a/src/coreclr/inc/readytorunhelpers.h b/src/coreclr/inc/readytorunhelpers.h
index bbb586e8eb4a30..a1fcef8fbaf835 100644
--- a/src/coreclr/inc/readytorunhelpers.h
+++ b/src/coreclr/inc/readytorunhelpers.h
@@ -24,6 +24,7 @@ HELPER(READYTORUN_HELPER_ThrowDivZero, CORINFO_HELP_THROWDIVZERO,
HELPER(READYTORUN_HELPER_WriteBarrier, CORINFO_HELP_ASSIGN_REF, )
HELPER(READYTORUN_HELPER_CheckedWriteBarrier, CORINFO_HELP_CHECKED_ASSIGN_REF, )
HELPER(READYTORUN_HELPER_ByRefWriteBarrier, CORINFO_HELP_ASSIGN_BYREF, )
+HELPER(READYTORUN_HELPER_BulkWriteBarrier, CORINFO_HELP_BULK_WRITEBARRIER, )
HELPER(READYTORUN_HELPER_Stelem_Ref, CORINFO_HELP_ARRADDR_ST, )
HELPER(READYTORUN_HELPER_Ldelema_Ref, CORINFO_HELP_LDELEMA_REF, )
diff --git a/src/coreclr/inc/stresslog.h b/src/coreclr/inc/stresslog.h
index 8e89a06a838dcf..998c62254f6eca 100644
--- a/src/coreclr/inc/stresslog.h
+++ b/src/coreclr/inc/stresslog.h
@@ -807,7 +807,7 @@ class ThreadStressLog {
#endif //!STRESS_LOG_READONLY && !STRESS_LOG_ANALYZER
#if defined(MEMORY_MAPPED_STRESSLOG) && !defined(STRESS_LOG_ANALYZER)
- void* __cdecl operator new(size_t n, const NoThrow&) NOEXCEPT;
+ void* __cdecl operator new(size_t n, const std::nothrow_t&) noexcept;
void __cdecl operator delete (void * chunk);
#endif
diff --git a/src/coreclr/inc/utilcode.h b/src/coreclr/inc/utilcode.h
index ed15764b970e74..d4416a36919bd6 100644
--- a/src/coreclr/inc/utilcode.h
+++ b/src/coreclr/inc/utilcode.h
@@ -316,10 +316,10 @@ _Ret_bytecap_(n) void * __cdecl
operator new[](size_t n);
void __cdecl
-operator delete(void *p) NOEXCEPT;
+operator delete(void *p) noexcept;
void __cdecl
-operator delete[](void *p) NOEXCEPT;
+operator delete[](void *p) noexcept;
#ifdef _DEBUG_IMPL
HRESULT _OutOfMemory(LPCSTR szFile, int iLine);
@@ -3728,8 +3728,8 @@ extern const CExecutable executable;
void * __cdecl operator new(size_t n, const CExecutable&);
void * __cdecl operator new[](size_t n, const CExecutable&);
-void * __cdecl operator new(size_t n, const CExecutable&, const NoThrow&);
-void * __cdecl operator new[](size_t n, const CExecutable&, const NoThrow&);
+void * __cdecl operator new(size_t n, const CExecutable&, const std::nothrow_t&) noexcept;
+void * __cdecl operator new[](size_t n, const CExecutable&, const std::nothrow_t&) noexcept;
//
diff --git a/src/coreclr/jit/ICorJitInfo_wrapper_generated.hpp b/src/coreclr/jit/ICorJitInfo_wrapper_generated.hpp
index 85db2ec5efffa5..e001c56c26dcbd 100644
--- a/src/coreclr/jit/ICorJitInfo_wrapper_generated.hpp
+++ b/src/coreclr/jit/ICorJitInfo_wrapper_generated.hpp
@@ -1666,10 +1666,11 @@ JITINTERFACE_HRESULT WrapICorJitInfo::getPgoInstrumentationResults(
ICorJitInfo::PgoInstrumentationSchema** pSchema,
uint32_t* pCountSchemaItems,
uint8_t** pInstrumentationData,
- ICorJitInfo::PgoSource* pgoSource)
+ ICorJitInfo::PgoSource* pPgoSource,
+ bool* pDynamicPgo)
{
API_ENTER(getPgoInstrumentationResults);
- JITINTERFACE_HRESULT temp = wrapHnd->getPgoInstrumentationResults(ftnHnd, pSchema, pCountSchemaItems, pInstrumentationData, pgoSource);
+ JITINTERFACE_HRESULT temp = wrapHnd->getPgoInstrumentationResults(ftnHnd, pSchema, pCountSchemaItems, pInstrumentationData, pPgoSource, pDynamicPgo);
API_LEAVE(getPgoInstrumentationResults);
return temp;
}
diff --git a/src/coreclr/jit/abi.cpp b/src/coreclr/jit/abi.cpp
index fd899b899546b1..7fa39d04644342 100644
--- a/src/coreclr/jit/abi.cpp
+++ b/src/coreclr/jit/abi.cpp
@@ -258,14 +258,20 @@ bool ABIPassingInformation::HasExactlyOneStackSegment() const
//
bool ABIPassingInformation::IsSplitAcrossRegistersAndStack() const
{
- bool anyReg = false;
- bool anyStack = false;
- for (unsigned i = 0; i < NumSegments; i++)
+ if (NumSegments < 2)
+ {
+ return false;
+ }
+
+ bool isFirstInReg = Segments[0].IsPassedInRegister();
+ for (unsigned i = 1; i < NumSegments; i++)
{
- anyReg |= Segments[i].IsPassedInRegister();
- anyStack |= Segments[i].IsPassedOnStack();
+ if (isFirstInReg != Segments[i].IsPassedInRegister())
+ {
+ return true;
+ }
}
- return anyReg && anyStack;
+ return false;
}
//-----------------------------------------------------------------------------
@@ -415,6 +421,9 @@ ABIPassingInformation SwiftABIClassifier::Classify(Compiler* comp,
{
ABIPassingSegment newSegment = elemInfo.Segments[j];
newSegment.Offset += lowering->offsets[i];
+ // Adjust the tail size if necessary; the lowered sequence can
+ // pass the tail as a larger type than the tail size.
+ newSegment.Size = min(newSegment.Size, structLayout->GetSize() - newSegment.Offset);
segments.Push(newSegment);
}
}
diff --git a/src/coreclr/jit/abi.h b/src/coreclr/jit/abi.h
index ac0ad5090dcf2b..37268ce3effd9f 100644
--- a/src/coreclr/jit/abi.h
+++ b/src/coreclr/jit/abi.h
@@ -51,8 +51,8 @@ struct ABIPassingInformation
// - On loongarch64/riscv64, structs can be passed in two registers or
// can be split out over register and stack, giving
// multiple register segments and a struct segment.
- unsigned NumSegments = 0;
- ABIPassingSegment* Segments = nullptr;
+ unsigned NumSegments;
+ ABIPassingSegment* Segments;
ABIPassingInformation(unsigned numSegments = 0, ABIPassingSegment* segments = nullptr)
: NumSegments(numSegments)
diff --git a/src/coreclr/jit/block.cpp b/src/coreclr/jit/block.cpp
index 60dbce6aaf00a0..ecee292264ec85 100644
--- a/src/coreclr/jit/block.cpp
+++ b/src/coreclr/jit/block.cpp
@@ -137,14 +137,17 @@ void FlowEdge::addLikelihood(weight_t addedLikelihood)
// AllSuccessorEnumerator: Construct an instance of the enumerator.
//
// Arguments:
-// comp - Compiler instance
-// block - The block whose successors are to be iterated
+// comp - Compiler instance
+// block - The block whose successors are to be iterated
+// useProfile - If true, determines the order of successors visited using profile data
//
-AllSuccessorEnumerator::AllSuccessorEnumerator(Compiler* comp, BasicBlock* block)
+AllSuccessorEnumerator::AllSuccessorEnumerator(Compiler* comp, BasicBlock* block, const bool useProfile /* = false */)
: m_block(block)
{
m_numSuccs = 0;
- block->VisitAllSuccs(comp, [this](BasicBlock* succ) {
+ block->VisitAllSuccs(
+ comp,
+ [this](BasicBlock* succ) {
if (m_numSuccs < ArrLen(m_successors))
{
m_successors[m_numSuccs] = succ;
@@ -152,18 +155,22 @@ AllSuccessorEnumerator::AllSuccessorEnumerator(Compiler* comp, BasicBlock* block
m_numSuccs++;
return BasicBlockVisit::Continue;
- });
+ },
+ useProfile);
if (m_numSuccs > ArrLen(m_successors))
{
m_pSuccessors = new (comp, CMK_BasicBlock) BasicBlock*[m_numSuccs];
unsigned numSuccs = 0;
- block->VisitAllSuccs(comp, [this, &numSuccs](BasicBlock* succ) {
+ block->VisitAllSuccs(
+ comp,
+ [this, &numSuccs](BasicBlock* succ) {
assert(numSuccs < m_numSuccs);
m_pSuccessors[numSuccs++] = succ;
return BasicBlockVisit::Continue;
- });
+ },
+ useProfile);
assert(numSuccs == m_numSuccs);
}
diff --git a/src/coreclr/jit/block.h b/src/coreclr/jit/block.h
index 168f29cca084dd..6ff2bb31b2a856 100644
--- a/src/coreclr/jit/block.h
+++ b/src/coreclr/jit/block.h
@@ -1256,27 +1256,6 @@ struct BasicBlock : private LIR::Range
this->scaleBBWeight(BB_ZERO_WEIGHT);
}
- // makeBlockHot()
- // This is used to override any profiling data
- // and force a block to be in the hot region.
- // We only call this method for handler entry point
- // and only when HANDLER_ENTRY_MUST_BE_IN_HOT_SECTION is 1.
- // Doing this helps fgReorderBlocks() by telling
- // it to try to move these blocks into the hot region.
- // Note that we do this strictly as an optimization,
- // not for correctness. fgDetermineFirstColdBlock()
- // will find all handler entry points and ensure that
- // for now we don't place them in the cold section.
- //
- void makeBlockHot()
- {
- if (this->bbWeight == BB_ZERO_WEIGHT)
- {
- this->RemoveFlags(BBF_RUN_RARELY | BBF_PROF_WEIGHT);
- this->bbWeight = 1;
- }
- }
-
bool isMaxBBWeight() const
{
return (bbWeight >= BB_MAX_WEIGHT);
@@ -1820,7 +1799,7 @@ struct BasicBlock : private LIR::Range
BasicBlockVisit VisitEHEnclosedHandlerSecondPassSuccs(Compiler* comp, TFunc func);
template
- BasicBlockVisit VisitAllSuccs(Compiler* comp, TFunc func);
+ BasicBlockVisit VisitAllSuccs(Compiler* comp, TFunc func, const bool useProfile = false);
template
BasicBlockVisit VisitEHSuccs(Compiler* comp, TFunc func);
@@ -2518,7 +2497,7 @@ class AllSuccessorEnumerator
public:
// Constructs an enumerator of all `block`'s successors.
- AllSuccessorEnumerator(Compiler* comp, BasicBlock* block);
+ AllSuccessorEnumerator(Compiler* comp, BasicBlock* block, const bool useProfile = false);
// Gets the block whose successors are enumerated.
BasicBlock* Block()
diff --git a/src/coreclr/jit/codegen.h b/src/coreclr/jit/codegen.h
index 3511935a062b0a..2a2ede6bae9ac0 100644
--- a/src/coreclr/jit/codegen.h
+++ b/src/coreclr/jit/codegen.h
@@ -101,6 +101,16 @@ class CodeGen final : public CodeGenInterface
}
}
+#if defined(TARGET_ARM64)
+ regNumber getNextSIMDRegWithWraparound(regNumber reg)
+ {
+ regNumber nextReg = REG_NEXT(reg);
+
+ // Wraparound if necessary, REG_V0 comes next after REG_V31.
+ return (nextReg > REG_V31) ? REG_V0 : nextReg;
+ }
+#endif // defined(TARGET_ARM64)
+
static GenTreeIndir indirForm(var_types type, GenTree* base);
static GenTreeStoreInd storeIndirForm(var_types type, GenTree* base, GenTree* data);
@@ -274,7 +284,9 @@ class CodeGen final : public CodeGenInterface
void genEnregisterOSRArgsAndLocals();
#endif
+ void genHomeStackSegment(unsigned lclNum, const ABIPassingSegment& seg, regNumber initReg, bool* pInitRegZeroed);
void genHomeSwiftStructParameters(bool handleStack);
+ void genHomeStackPartOfSplitParameter(regNumber initReg, bool* initRegStillZeroed);
void genCheckUseBlockInit();
#if defined(UNIX_AMD64_ABI) && defined(FEATURE_SIMD)
@@ -437,26 +449,7 @@ class CodeGen final : public CodeGenInterface
FuncletFrameInfoDsc genFuncletInfo;
-#elif defined(TARGET_LOONGARCH64)
-
- // A set of information that is used by funclet prolog and epilog generation.
- // It is collected once, before funclet prologs and epilogs are generated,
- // and used by all funclet prologs and epilogs, which must all be the same.
- struct FuncletFrameInfoDsc
- {
- regMaskTP fiSaveRegs; // Set of callee-saved registers saved in the funclet prolog (includes RA)
- int fiFunction_CallerSP_to_FP_delta; // Delta between caller SP and the frame pointer in the parent function
- // (negative)
- int fiSP_to_CalleeSaved_delta; // CalleeSaved register save offset from SP (positive)
- int fiCalleeSavedPadding; // CalleeSaved offset padding (positive)
- int fiSP_to_PSP_slot_delta; // PSP slot offset from SP (positive)
- int fiCallerSP_to_PSP_slot_delta; // PSP slot offset from Caller SP (negative)
- int fiSpDelta; // Stack pointer delta (negative)
- };
-
- FuncletFrameInfoDsc genFuncletInfo;
-
-#elif defined(TARGET_RISCV64)
+#elif defined(TARGET_LOONGARCH64) || defined(TARGET_RISCV64)
// A set of information that is used by funclet prolog and epilog generation.
// It is collected once, before funclet prologs and epilogs are generated,
@@ -467,7 +460,6 @@ class CodeGen final : public CodeGenInterface
int fiFunction_CallerSP_to_FP_delta; // Delta between caller SP and the frame pointer in the parent function
// (negative)
int fiSP_to_CalleeSaved_delta; // CalleeSaved register save offset from SP (positive)
- int fiCalleeSavedPadding; // CalleeSaved offset padding (positive)
int fiSP_to_PSP_slot_delta; // PSP slot offset from SP (positive)
int fiCallerSP_to_PSP_slot_delta; // PSP slot offset from Caller SP (negative)
int fiSpDelta; // Stack pointer delta (negative)
@@ -1272,7 +1264,6 @@ class CodeGen final : public CodeGenInterface
void genJmpMethod(GenTree* jmp);
BasicBlock* genCallFinally(BasicBlock* block);
#if defined(TARGET_LOONGARCH64) || defined(TARGET_RISCV64)
- // TODO: refactor for LA.
void genCodeForJumpCompare(GenTreeOpCC* tree);
#endif
#if defined(TARGET_ARM64)
diff --git a/src/coreclr/jit/codegenarm.cpp b/src/coreclr/jit/codegenarm.cpp
index 2c010f116a2657..dea8b19fbee94f 100644
--- a/src/coreclr/jit/codegenarm.cpp
+++ b/src/coreclr/jit/codegenarm.cpp
@@ -280,7 +280,7 @@ void CodeGen::genSetRegToConst(regNumber targetReg, var_types targetType, GenTre
if (targetType == TYP_FLOAT)
{
// Get a temp integer register
- regNumber tmpReg = tree->GetSingleTempReg();
+ regNumber tmpReg = internalRegisters.GetSingle(tree);
float f = forceCastToFloat(constValue);
instGen_Set_Reg_To_Imm(EA_4BYTE, tmpReg, *((int*)(&f)));
@@ -293,8 +293,8 @@ void CodeGen::genSetRegToConst(regNumber targetReg, var_types targetType, GenTre
unsigned* cv = (unsigned*)&constValue;
// Get two temp integer registers
- regNumber tmpReg1 = tree->ExtractTempReg();
- regNumber tmpReg2 = tree->GetSingleTempReg();
+ regNumber tmpReg1 = internalRegisters.Extract(tree);
+ regNumber tmpReg2 = internalRegisters.GetSingle(tree);
instGen_Set_Reg_To_Imm(EA_4BYTE, tmpReg1, cv[0]);
instGen_Set_Reg_To_Imm(EA_4BYTE, tmpReg2, cv[1]);
@@ -431,9 +431,9 @@ void CodeGen::genLclHeap(GenTree* tree)
}
// Setup the regTmp, if there is one.
- if (tree->AvailableTempRegCount() > 0)
+ if (internalRegisters.Count(tree) > 0)
{
- regTmp = tree->ExtractTempReg();
+ regTmp = internalRegisters.Extract(tree);
}
// If we have an outgoing arg area then we must adjust the SP by popping off the
@@ -833,7 +833,7 @@ void CodeGen::genCodeForCpObj(GenTreeBlk* cpObjNode)
gcInfo.gcMarkRegPtrVal(REG_WRITE_BARRIER_DST_BYREF, dstAddr->TypeGet());
// Temp register used to perform the sequence of loads and stores.
- regNumber tmpReg = cpObjNode->ExtractTempReg();
+ regNumber tmpReg = internalRegisters.Extract(cpObjNode);
assert(genIsValidIntReg(tmpReg));
if (cpObjNode->IsVolatile())
@@ -1026,18 +1026,18 @@ void CodeGen::genCodeForStoreLclFld(GenTreeLclFld* tree)
{
// Arm supports unaligned access only for integer types,
// convert the storing floating data into 1 or 2 integer registers and write them as int.
- regNumber addr = tree->ExtractTempReg();
+ regNumber addr = internalRegisters.Extract(tree);
emit->emitIns_R_S(INS_lea, EA_PTRSIZE, addr, varNum, offset);
if (targetType == TYP_FLOAT)
{
- regNumber floatAsInt = tree->GetSingleTempReg();
+ regNumber floatAsInt = internalRegisters.GetSingle(tree);
emit->emitIns_Mov(INS_vmov_f2i, EA_4BYTE, floatAsInt, dataReg, /* canSkip */ false);
emit->emitIns_R_R(INS_str, EA_4BYTE, floatAsInt, addr);
}
else
{
- regNumber halfdoubleAsInt1 = tree->ExtractTempReg();
- regNumber halfdoubleAsInt2 = tree->GetSingleTempReg();
+ regNumber halfdoubleAsInt1 = internalRegisters.Extract(tree);
+ regNumber halfdoubleAsInt2 = internalRegisters.GetSingle(tree);
emit->emitIns_R_R_R(INS_vmov_d2i, EA_8BYTE, halfdoubleAsInt1, halfdoubleAsInt2, dataReg);
emit->emitIns_R_R_I(INS_str, EA_4BYTE, halfdoubleAsInt1, addr, 0);
emit->emitIns_R_R_I(INS_str, EA_4BYTE, halfdoubleAsInt1, addr, 4);
@@ -1209,7 +1209,7 @@ void CodeGen::genCkfinite(GenTree* treeNode)
emitter* emit = GetEmitter();
var_types targetType = treeNode->TypeGet();
- regNumber intReg = treeNode->GetSingleTempReg();
+ regNumber intReg = internalRegisters.GetSingle(treeNode);
regNumber fpReg = genConsumeReg(treeNode->AsOp()->gtOp1);
regNumber targetReg = treeNode->GetRegNum();
@@ -1592,7 +1592,7 @@ void CodeGen::genFloatToIntCast(GenTree* treeNode)
genConsumeOperands(treeNode->AsOp());
- regNumber tmpReg = treeNode->GetSingleTempReg();
+ regNumber tmpReg = internalRegisters.GetSingle(treeNode);
assert(insVcvt != INS_invalid);
GetEmitter()->emitIns_R_R(insVcvt, dstSize, tmpReg, op1->GetRegNum());
diff --git a/src/coreclr/jit/codegenarm64.cpp b/src/coreclr/jit/codegenarm64.cpp
index 8695545cc934a3..dc79220dcd0b87 100644
--- a/src/coreclr/jit/codegenarm64.cpp
+++ b/src/coreclr/jit/codegenarm64.cpp
@@ -2372,7 +2372,7 @@ void CodeGen::genSetRegToConst(regNumber targetReg, var_types targetType, GenTre
else
{
// Get a temp integer register to compute long address.
- regNumber addrReg = tree->GetSingleTempReg();
+ regNumber addrReg = internalRegisters.GetSingle(tree);
// We must load the FP constant from the constant pool
// Emit a data section constant for the float or double constant.
@@ -2407,7 +2407,7 @@ void CodeGen::genSetRegToConst(regNumber targetReg, var_types targetType, GenTre
else
{
// Get a temp integer register to compute long address.
- regNumber addrReg = tree->GetSingleTempReg();
+ regNumber addrReg = internalRegisters.GetSingle(tree);
simd8_t constValue;
memcpy(&constValue, &vecCon->gtSimdVal, sizeof(simd8_t));
@@ -2431,7 +2431,7 @@ void CodeGen::genSetRegToConst(regNumber targetReg, var_types targetType, GenTre
else
{
// Get a temp integer register to compute long address.
- regNumber addrReg = tree->GetSingleTempReg();
+ regNumber addrReg = internalRegisters.GetSingle(tree);
simd16_t constValue = {};
memcpy(&constValue, &vecCon->gtSimdVal, sizeof(simd12_t));
@@ -2455,7 +2455,7 @@ void CodeGen::genSetRegToConst(regNumber targetReg, var_types targetType, GenTre
else
{
// Get a temp integer register to compute long address.
- regNumber addrReg = tree->GetSingleTempReg();
+ regNumber addrReg = internalRegisters.GetSingle(tree);
simd16_t constValue;
memcpy(&constValue, &vecCon->gtSimdVal, sizeof(simd16_t));
@@ -3132,12 +3132,12 @@ void CodeGen::genLclHeap(GenTree* tree)
// since we don't need any internal registers.
if (compiler->info.compInitMem)
{
- assert(tree->AvailableTempRegCount() == 0);
+ assert(internalRegisters.Count(tree) == 0);
regCnt = targetReg;
}
else
{
- regCnt = tree->ExtractTempReg();
+ regCnt = internalRegisters.Extract(tree);
inst_Mov(size->TypeGet(), regCnt, targetReg, /* canSkip */ true);
}
@@ -3254,12 +3254,12 @@ void CodeGen::genLclHeap(GenTree* tree)
assert(regCnt == REG_NA);
if (compiler->info.compInitMem)
{
- assert(tree->AvailableTempRegCount() == 0);
+ assert(internalRegisters.Count(tree) == 0);
regCnt = targetReg;
}
else
{
- regCnt = tree->ExtractTempReg();
+ regCnt = internalRegisters.Extract(tree);
}
instGen_Set_Reg_To_Imm(((unsigned int)amount == amount) ? EA_4BYTE : EA_8BYTE, regCnt, amount);
}
@@ -3323,7 +3323,7 @@ void CodeGen::genLclHeap(GenTree* tree)
//
// Setup the regTmp
- regNumber regTmp = tree->GetSingleTempReg();
+ regNumber regTmp = internalRegisters.GetSingle(tree);
BasicBlock* loop = genCreateTempLabel();
BasicBlock* done = genCreateTempLabel();
@@ -3668,7 +3668,7 @@ void CodeGen::genCodeForCpObj(GenTreeBlk* cpObjNode)
unsigned slots = layout->GetSlotCount();
// Temp register(s) used to perform the sequence of loads and stores.
- regNumber tmpReg = cpObjNode->ExtractTempReg(RBM_ALLINT);
+ regNumber tmpReg = internalRegisters.Extract(cpObjNode, RBM_ALLINT);
regNumber tmpReg2 = REG_NA;
assert(genIsValidIntReg(tmpReg));
@@ -3677,7 +3677,7 @@ void CodeGen::genCodeForCpObj(GenTreeBlk* cpObjNode)
if (slots > 1)
{
- tmpReg2 = cpObjNode->ExtractTempReg(RBM_ALLINT);
+ tmpReg2 = internalRegisters.Extract(cpObjNode, RBM_ALLINT);
assert(tmpReg2 != tmpReg);
assert(genIsValidIntReg(tmpReg2));
assert(tmpReg2 != REG_WRITE_BARRIER_DST_BYREF);
@@ -3730,8 +3730,8 @@ void CodeGen::genCodeForCpObj(GenTreeBlk* cpObjNode)
regNumber tmpSimdReg2 = REG_NA;
if ((slots >= 4) && compiler->IsBaselineSimdIsaSupported())
{
- tmpSimdReg1 = cpObjNode->ExtractTempReg(RBM_ALLFLOAT);
- tmpSimdReg2 = cpObjNode->ExtractTempReg(RBM_ALLFLOAT);
+ tmpSimdReg1 = internalRegisters.Extract(cpObjNode, RBM_ALLFLOAT);
+ tmpSimdReg2 = internalRegisters.Extract(cpObjNode, RBM_ALLFLOAT);
}
unsigned i = 0;
@@ -3810,7 +3810,7 @@ void CodeGen::genTableBasedSwitch(GenTree* treeNode)
regNumber idxReg = treeNode->AsOp()->gtOp1->GetRegNum();
regNumber baseReg = treeNode->AsOp()->gtOp2->GetRegNum();
- regNumber tmpReg = treeNode->GetSingleTempReg();
+ regNumber tmpReg = internalRegisters.GetSingle(treeNode);
// load the ip-relative offset (which is relative to start of fgFirstBB)
GetEmitter()->emitIns_R_R_R(INS_ldr, EA_4BYTE, baseReg, baseReg, idxReg, INS_OPTS_LSL);
@@ -3869,7 +3869,7 @@ void CodeGen::genLockedInstructions(GenTreeOp* treeNode)
case GT_XAND:
{
// Grab a temp reg to perform `MVN` for dataReg first.
- regNumber tempReg = treeNode->GetSingleTempReg();
+ regNumber tempReg = internalRegisters.GetSingle(treeNode);
GetEmitter()->emitIns_R_R(INS_mvn, dataSize, tempReg, dataReg);
GetEmitter()->emitIns_R_R_R(INS_ldclral, dataSize, tempReg, (targetReg == REG_NA) ? REG_ZR : targetReg,
addrReg);
@@ -3902,9 +3902,10 @@ void CodeGen::genLockedInstructions(GenTreeOp* treeNode)
// These are imported normally if Atomics aren't supported.
assert(!treeNode->OperIs(GT_XORR, GT_XAND));
- regNumber exResultReg = treeNode->ExtractTempReg(RBM_ALLINT);
- regNumber storeDataReg = (treeNode->OperGet() == GT_XCHG) ? dataReg : treeNode->ExtractTempReg(RBM_ALLINT);
- regNumber loadReg = (targetReg != REG_NA) ? targetReg : storeDataReg;
+ regNumber exResultReg = internalRegisters.Extract(treeNode, RBM_ALLINT);
+ regNumber storeDataReg =
+ (treeNode->OperGet() == GT_XCHG) ? dataReg : internalRegisters.Extract(treeNode, RBM_ALLINT);
+ regNumber loadReg = (targetReg != REG_NA) ? targetReg : storeDataReg;
// Check allocator assumptions
//
@@ -4055,7 +4056,7 @@ void CodeGen::genCodeForCmpXchg(GenTreeCmpXchg* treeNode)
}
else
{
- regNumber exResultReg = treeNode->ExtractTempReg(RBM_ALLINT);
+ regNumber exResultReg = internalRegisters.Extract(treeNode, RBM_ALLINT);
// Check allocator assumptions
//
@@ -4600,7 +4601,7 @@ void CodeGen::genCkfinite(GenTree* treeNode)
emitter* emit = GetEmitter();
// Extract exponent into a register.
- regNumber intReg = treeNode->GetSingleTempReg();
+ regNumber intReg = internalRegisters.GetSingle(treeNode);
regNumber fpReg = genConsumeReg(op1);
inst_Mov(targetType, intReg, fpReg, /* canSkip */ false, emitActualTypeSize(treeNode));
@@ -5351,7 +5352,7 @@ void CodeGen::genStoreIndTypeSimd12(GenTreeStoreInd* treeNode)
regNumber dataReg = genConsumeReg(data);
// Need an additional integer register to extract upper 4 bytes from data.
- regNumber tmpReg = treeNode->GetSingleTempReg();
+ regNumber tmpReg = internalRegisters.GetSingle(treeNode);
// 8-byte write
GetEmitter()->emitIns_R_R(INS_str, EA_8BYTE, dataReg, addrReg);
@@ -5386,7 +5387,7 @@ void CodeGen::genLoadIndTypeSimd12(GenTreeIndir* treeNode)
regNumber addrReg = genConsumeReg(addr);
// Need an additional int register to read upper 4 bytes, which is different from targetReg
- regNumber tmpReg = treeNode->GetSingleTempReg();
+ regNumber tmpReg = internalRegisters.GetSingle(treeNode);
// 8-byte read
GetEmitter()->emitIns_R_R(INS_ldr, EA_8BYTE, tgtReg, addrReg);
diff --git a/src/coreclr/jit/codegenarm64test.cpp b/src/coreclr/jit/codegenarm64test.cpp
index 52633ed6733e6a..ddc7d831f228df 100644
--- a/src/coreclr/jit/codegenarm64test.cpp
+++ b/src/coreclr/jit/codegenarm64test.cpp
@@ -4572,14 +4572,24 @@ void CodeGen::genArm64EmitterUnitTestsSve()
INS_OPTS_SCALABLE_S); // EOR ., /M, ., .
theEmitter->emitIns_R_R_R(INS_sve_orr, EA_SCALABLE, REG_V29, REG_P7, REG_V31,
INS_OPTS_SCALABLE_D); // ORR ., /M, ., .
-
- // IF_SVE_AB_3A
theEmitter->emitIns_R_R_R(INS_sve_add, EA_SCALABLE, REG_V5, REG_P6, REG_V7,
INS_OPTS_SCALABLE_B); // ADD ., /M, ., .
theEmitter->emitIns_R_R_R(INS_sve_sub, EA_SCALABLE, REG_V15, REG_P7, REG_V29,
INS_OPTS_SCALABLE_H); // SUB ., /M, ., .
theEmitter->emitIns_R_R_R(INS_sve_subr, EA_SCALABLE, REG_V2, REG_P0, REG_V13,
INS_OPTS_SCALABLE_S); // SUBR ., /M, ., .
+ theEmitter->emitIns_R_R_R(INS_sve_smax, EA_SCALABLE, REG_V24, REG_P0, REG_V2,
+ INS_OPTS_SCALABLE_B); // SMAX ., /M, ., .
+ theEmitter->emitIns_R_R_R(INS_sve_smin, EA_SCALABLE, REG_V9, REG_P1, REG_V27,
+ INS_OPTS_SCALABLE_H); // SMIN ., /M, ., .
+ theEmitter->emitIns_R_R_R(INS_sve_sabd, EA_SCALABLE, REG_V5, REG_P2, REG_V6,
+ INS_OPTS_SCALABLE_B); // SABD ., /M, ., .
+ theEmitter->emitIns_R_R_R(INS_sve_uabd, EA_SCALABLE, REG_V23, REG_P3, REG_V9,
+ INS_OPTS_SCALABLE_S); // UABD ., /M, ., .
+ theEmitter->emitIns_R_R_R(INS_sve_umax, EA_SCALABLE, REG_V15, REG_P4, REG_V2,
+ INS_OPTS_SCALABLE_S); // UMAX ., /M, ., .
+ theEmitter->emitIns_R_R_R(INS_sve_umin, EA_SCALABLE, REG_V12, REG_P7, REG_V0,
+ INS_OPTS_SCALABLE_D); // UMIN ., /M, ., .
#ifdef ALL_ARM64_EMITTER_UNIT_TESTS_SVE_UNSUPPORTED
// IF_SVE_AB_3B
@@ -4599,20 +4609,6 @@ void CodeGen::genArm64EmitterUnitTestsSve()
theEmitter->emitIns_R_R_R(INS_sve_udivr, EA_SCALABLE, REG_V13, REG_P7, REG_V15,
INS_OPTS_SCALABLE_D); // UDIVR ., /M, ., .
- // IF_SVE_AD_3A
- theEmitter->emitIns_R_R_R(INS_sve_smax, EA_SCALABLE, REG_V24, REG_P0, REG_V2,
- INS_OPTS_SCALABLE_B); // SMAX ., /M, ., .
- theEmitter->emitIns_R_R_R(INS_sve_smin, EA_SCALABLE, REG_V9, REG_P1, REG_V27,
- INS_OPTS_SCALABLE_H); // SMIN ., /M, ., .
- theEmitter->emitIns_R_R_R(INS_sve_sabd, EA_SCALABLE, REG_V5, REG_P2, REG_V6,
- INS_OPTS_SCALABLE_B); // SABD ., /M, ., .
- theEmitter->emitIns_R_R_R(INS_sve_uabd, EA_SCALABLE, REG_V23, REG_P3, REG_V9,
- INS_OPTS_SCALABLE_S); // UABD ., /M, ., .
- theEmitter->emitIns_R_R_R(INS_sve_umax, EA_SCALABLE, REG_V15, REG_P4, REG_V2,
- INS_OPTS_SCALABLE_S); // UMAX ., /M, ., .
- theEmitter->emitIns_R_R_R(INS_sve_umin, EA_SCALABLE, REG_V12, REG_P7, REG_V0,
- INS_OPTS_SCALABLE_D); // UMIN ., /M, ., .
-
// IF_SVE_AE_3A
theEmitter->emitIns_R_R_R(INS_sve_mul, EA_SCALABLE, REG_V5, REG_P1, REG_V3,
INS_OPTS_SCALABLE_D); // MUL ., /M, ., .
@@ -4802,11 +4798,11 @@ void CodeGen::genArm64EmitterUnitTestsSve()
INS_OPTS_SCALABLE_D); // CLASTB ., , ., .
// IF_SVE_CN_3A
- theEmitter->emitIns_R_R_R(INS_sve_clasta, EA_2BYTE, REG_V12, REG_P1, REG_V15, INS_OPTS_SCALABLE_H,
+ theEmitter->emitIns_R_R_R(INS_sve_clasta, EA_SCALABLE, REG_V12, REG_P1, REG_V15, INS_OPTS_SCALABLE_H,
INS_SCALABLE_OPTS_WITH_SIMD_SCALAR); // CLASTA , , , .
- theEmitter->emitIns_R_R_R(INS_sve_clastb, EA_4BYTE, REG_V13, REG_P2, REG_V16, INS_OPTS_SCALABLE_S,
+ theEmitter->emitIns_R_R_R(INS_sve_clastb, EA_SCALABLE, REG_V13, REG_P2, REG_V16, INS_OPTS_SCALABLE_S,
INS_SCALABLE_OPTS_WITH_SIMD_SCALAR); // CLASTB , , , .
- theEmitter->emitIns_R_R_R(INS_sve_clastb, EA_8BYTE, REG_V14, REG_P0, REG_V17, INS_OPTS_SCALABLE_D,
+ theEmitter->emitIns_R_R_R(INS_sve_clastb, EA_SCALABLE, REG_V14, REG_P0, REG_V17, INS_OPTS_SCALABLE_D,
INS_SCALABLE_OPTS_WITH_SIMD_SCALAR); // CLASTB , , , .
// IF_SVE_CO_3A
@@ -5116,11 +5112,11 @@ void CodeGen::genArm64EmitterUnitTestsSve()
INS_OPTS_SCALABLE_H); // FMINP ., /M, ., .
// IF_SVE_HJ_3A
- theEmitter->emitIns_R_R_R(INS_sve_fadda, EA_2BYTE, REG_V21, REG_P6, REG_V14,
+ theEmitter->emitIns_R_R_R(INS_sve_fadda, EA_SCALABLE, REG_V21, REG_P6, REG_V14,
INS_OPTS_SCALABLE_H); // FADDA , , , .
- theEmitter->emitIns_R_R_R(INS_sve_fadda, EA_4BYTE, REG_V22, REG_P5, REG_V13,
+ theEmitter->emitIns_R_R_R(INS_sve_fadda, EA_SCALABLE, REG_V22, REG_P5, REG_V13,
INS_OPTS_SCALABLE_S); // FADDA , , , .
- theEmitter->emitIns_R_R_R(INS_sve_fadda, EA_8BYTE, REG_V23, REG_P4, REG_V12,
+ theEmitter->emitIns_R_R_R(INS_sve_fadda, EA_SCALABLE, REG_V23, REG_P4, REG_V12,
INS_OPTS_SCALABLE_D); // FADDA , , , .
// IF_SVE_HL_3A
@@ -5292,13 +5288,13 @@ void CodeGen::genArm64EmitterUnitTestsSve()
INS_OPTS_SCALABLE_H); // FNMLS ., /M, .,