diff --git a/.config/dotnet-tools.json b/.config/dotnet-tools.json
index 2f4ce2358e0..5dc9bb8d5f5 100644
--- a/.config/dotnet-tools.json
+++ b/.config/dotnet-tools.json
@@ -15,7 +15,7 @@
]
},
"microsoft.dotnet.xharness.cli": {
- "version": "9.0.0-prerelease.24405.1",
+ "version": "10.0.0-prerelease.24466.1",
"commands": [
"xharness"
]
diff --git a/.github/workflows/inter-branch-merge-flow.yml b/.github/workflows/inter-branch-merge-flow.yml
new file mode 100644
index 00000000000..20246c14fc5
--- /dev/null
+++ b/.github/workflows/inter-branch-merge-flow.yml
@@ -0,0 +1,13 @@
+name: Inter-branch merge workflow
+on:
+ push:
+ branches:
+ - release/**
+
+permissions:
+ contents: write
+ pull-requests: write
+
+jobs:
+ Merge:
+ uses: dotnet/arcade/.github/workflows/inter-branch-merge-base.yml@main
\ No newline at end of file
diff --git a/.github/workflows/jit-format.yml b/.github/workflows/jit-format.yml
index be0a5d854a9..18fb209c628 100644
--- a/.github/workflows/jit-format.yml
+++ b/.github/workflows/jit-format.yml
@@ -15,7 +15,7 @@ jobs:
os:
- name: linux
image: ubuntu-latest
- container: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-cross-amd64-net9.0
+ container: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-net9.0-cross-amd64
extension: '.sh'
cross: '--cross'
rootfs: '/crossrootfs/x64'
diff --git a/Directory.Build.props b/Directory.Build.props
index 726c8923b88..d483fa49709 100644
--- a/Directory.Build.props
+++ b/Directory.Build.props
@@ -485,6 +485,8 @@
true
+
+ false$(NoWarn);SYSLIB0011;SYSLIB0050;SYSLIB0051
diff --git a/Directory.Build.targets b/Directory.Build.targets
index bc5b847efc6..279184d1eb0 100644
--- a/Directory.Build.targets
+++ b/Directory.Build.targets
@@ -162,7 +162,7 @@
AfterTargets="ResolveTargetingPackAssets">
<_targetingPackReferenceExclusion Include="$(TargetName)" />
- <_targetingPackReferenceExclusion Include="@(_ResolvedProjectReferencePaths->Metadata('Filename'))" />
+ <_targetingPackReferenceExclusionProjects Include="@(_ResolvedProjectReferencePaths->Metadata('Filename'))" />
<_targetingPackReferenceExclusion Include="@(DefaultReferenceExclusion)" />
@@ -177,7 +177,7 @@
<_targetingPackReferenceWithProjectName Include="@(Reference->WithMetadataValue('ExternallyResolved', 'true')->Metadata('Filename'))"
OriginalIdentity="%(Identity)" />
<_targetingPackIncludedReferenceWithProjectName Include="@(_targetingPackReferenceWithProjectName)"
- Exclude="@(_targetingPackReferenceExclusion)" />
+ Exclude="@(_targetingPackReferenceExclusion);@(_targetingPackReferenceExclusionProjects->WithMetadataValue('ReferenceOutputAssembly', 'true'))" />
<_targetingPackExcludedReferenceWithProjectName Include="@(_targetingPackReferenceWithProjectName)"
Exclude="@(_targetingPackIncludedReferenceWithProjectName)" />
@@ -187,7 +187,7 @@
<_targetingPackAnalyzerReferenceWithProjectName Include="@(Analyzer->WithMetadataValue('ExternallyResolved', 'true')->Metadata('Filename'))"
OriginalIdentity="%(Identity)" />
<_targetingPackIncludedAnalyzerReferenceWithProjectName Include="@(_targetingPackAnalyzerReferenceWithProjectName)"
- Exclude="@(_targetingPackReferenceExclusion)" />
+ Exclude="@(_targetingPackReferenceExclusion);@(_targetingPackReferenceExclusionProjects->WithMetadataValue('OutputItemType', 'Analyzer'))" />
<_targetingPackExcludedAnalyzerReferenceWithProjectName Include="@(_targetingPackAnalyzerReferenceWithProjectName)"
Exclude="@(_targetingPackIncludedAnalyzerReferenceWithProjectName)" />
diff --git a/NuGet.config b/NuGet.config
index c9f91b53273..3a61ca31799 100644
--- a/NuGet.config
+++ b/NuGet.config
@@ -20,6 +20,8 @@
+
+
diff --git a/THIRD-PARTY-NOTICES.TXT b/THIRD-PARTY-NOTICES.TXT
index 18e149ccfd1..3dc0d7aa859 100644
--- a/THIRD-PARTY-NOTICES.TXT
+++ b/THIRD-PARTY-NOTICES.TXT
@@ -69,19 +69,27 @@ written authorization of the copyright holder.
License notice for zlib-ng
-----------------------
-https://github.com/zlib-ng/zlib-ng/blob/develop/LICENSE.md
+https://github.com/zlib-ng/zlib-ng/blob/d54e3769be0c522015b784eca2af258b1c026107/LICENSE.md
(C) 1995-2024 Jean-loup Gailly and Mark Adler
-This software is provided 'as-is', without any express or implied warranty. In no event will the authors be held liable for any damages arising from the use of this software.
+This software is provided 'as-is', without any express or implied
+warranty. In no event will the authors be held liable for any damages
+arising from the use of this software.
-Permission is granted to anyone to use this software for any purpose, including commercial applications, and to alter it and redistribute it freely, subject to the following restrictions:
+Permission is granted to anyone to use this software for any purpose,
+including commercial applications, and to alter it and redistribute it
+freely, subject to the following restrictions:
-The origin of this software must not be misrepresented; you must not claim that you wrote the original software. If you use this software in a product, an acknowledgment in the product documentation would be appreciated but is not required.
+1. The origin of this software must not be misrepresented; you must not
+ claim that you wrote the original software. If you use this software
+ in a product, an acknowledgment in the product documentation would be
+ appreciated but is not required.
-Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software.
+2. Altered source versions must be plainly marked as such, and must not be
+ misrepresented as being the original software.
-This notice may not be removed or altered from any source distribution.
+3. This notice may not be removed or altered from any source distribution.
License notice for LinuxTracepoints
-----------------------------------
diff --git a/docs/area-owners.md b/docs/area-owners.md
index 59c7e97da44..9aa33a8a52f 100644
--- a/docs/area-owners.md
+++ b/docs/area-owners.md
@@ -65,6 +65,7 @@ Note: Editing this file doesn't update the mapping used by `@msftbot` for area-s
| area-Single-File | @agocke | @vitek-karas @vsadov | |
| area-Snap | @MichaelSimons | @NikolaMilosavljevic @leecow @MichaelSimons | |
| area-System.Buffers | @jeffhandley | @dotnet/area-system-buffers | |
+| area-System.ClientModel | @terrajobst | @dotnet/fxdc | Bugs and feature requests should go to https://github.com/Azure/azure-sdk-for-net/issues. We don't own the code, but FXDC reviews changes to determine overlap with other `System` concepts. The Azure SDK team will post API updates in this repo for us to review. |
| area-System.CodeDom | @ericstj | @dotnet/area-system-codedom | |
| area-System.Collections | @jeffhandley | @dotnet/area-system-collections | Excluded:
System.Array -> System.Runtime
|
| area-System.ComponentModel | @ericstj | @dotnet/area-system-componentmodel | Consultants: @dotnet/dotnet-winforms |
diff --git a/docs/coding-guidelines/libraries-packaging.md b/docs/coding-guidelines/libraries-packaging.md
index 61829fe5459..0ae462bc713 100644
--- a/docs/coding-guidelines/libraries-packaging.md
+++ b/docs/coding-guidelines/libraries-packaging.md
@@ -18,6 +18,14 @@ Source generators and analyzers can be included in the shared framework by addin
Removing a library from the shared framework is a breaking change and should be avoided.
+### References to libraries in the shared framework that produce packages
+
+It's beneficial to avoid project references to libraries that are in the shared framework because it makes the package graph smaller which reduces the number of packages that require servicing and the number of libraries that end up being copied into the application directory.
+
+If a dependency is part of the shared framework a project/package reference is never required on the latest version (`NetCoreAppCurrent`). A reference is required for previous .NET versions even if the dependency is part of the shared framework if the project you are building targets .NETStandard and references the project there. You may completely avoid a package dependency on .NETStandard and .NET if it's not needed for .NETStandard (for example - if it is an implementation only dependency and you're building a PNSE assembly for .NETStandard).
+
+Warning NETPKG0001 is emitted when you have an unnecessary reference to a library that is part of the shared framework. To avoid this warning, make sure your ProjectReference is conditioned so that it doesn't apply on `NetCoreAppCurrent`.
+
## Transport package
Transport packages are non-shipping packages that dotnet/runtime produces in order to share binaries with other repositories.
diff --git a/docs/deep-dive-blog-posts.md b/docs/deep-dive-blog-posts.md
index 56e29a1f587..367cd30b264 100644
--- a/docs/deep-dive-blog-posts.md
+++ b/docs/deep-dive-blog-posts.md
@@ -10,6 +10,7 @@
- [Performance Improvements in .NET 6](https://devblogs.microsoft.com/dotnet/performance-improvements-in-net-6/)
- [Performance Improvements in .NET 7](https://devblogs.microsoft.com/dotnet/performance_improvements_in_net_7/)
- [Performance Improvements in .NET 8](https://devblogs.microsoft.com/dotnet/performance-improvements-in-net-8/)
+- [Performance Improvements in .NET 9](https://devblogs.microsoft.com/dotnet/performance-improvements-in-net-9/)
### Posts that take a high-level look at the entire source:
diff --git a/docs/design/coreclr/botr/clr-abi.md b/docs/design/coreclr/botr/clr-abi.md
index 1347b2c0c6b..7da3805324e 100644
--- a/docs/design/coreclr/botr/clr-abi.md
+++ b/docs/design/coreclr/botr/clr-abi.md
@@ -23,11 +23,14 @@ ARM64: See [Overview of ARM64 ABI conventions](https://learn.microsoft.com/cpp/b
## Non-Windows ABI documentation
Arm corporation ABI documentation (for ARM32 and ARM64) is [here](https://developer.arm.com/architectures/system-architectures/software-standards/abi) and [here](https://github.com/ARM-software/abi-aa).
+Apple's ARM64 calling convention differences can be found [here](https://developer.apple.com/documentation/xcode/writing-arm64-code-for-apple-platforms).
The Linux System V x86_64 ABI is documented in [System V Application Binary Interface / AMD64 Architecture Processor Supplement](https://github.com/hjl-tools/x86-psABI/wiki/x86-64-psABI-1.0.pdf), with document source material [here](https://gitlab.com/x86-psABIs/x86-64-ABI).
The LoongArch64 ABI documentation is [here](https://github.com/loongson/LoongArch-Documentation/blob/main/docs/LoongArch-ELF-ABI-EN.adoc)
+The RISC-V ABIs Specification: [latest release](https://github.com/riscv-non-isa/riscv-elf-psabi-doc/releases/latest), [latest draft](https://github.com/riscv-non-isa/riscv-elf-psabi-doc/releases), [document source repo](https://github.com/riscv-non-isa/riscv-elf-psabi-doc).
+
# General Unwind/Frame Layout
For all non-x86 platforms, all methods must have unwind information so the garbage collector (GC) can unwind them (unlike native code in which a leaf method may be omitted).
@@ -76,7 +79,9 @@ On ARM and ARM64, just like native, nothing is put in the floating point registe
However, unlike native varargs, all floating point arguments are not promoted to double (`R8`), and instead retain their original type (`R4` or `R8`) (although this does not preclude an IL generator like managed C++ from explicitly injecting an upcast at the call-site and adjusting the call-site-sig appropriately). This leads to unexpected behavior when native C++ is ported to C# or even just managed via the different flavors of managed C++.
-Managed varargs are not supported in .NET Core.
+Managed varargs are supported on Windows only.
+
+Managed/native varargs are supported on Windows only. Support for managed/native varargs on non-Windows platforms is tracked by [this issue](https://github.com/dotnet/runtime/issues/82081).
## Generics
@@ -97,6 +102,10 @@ Just like native, AMD64 has implicit-byrefs. Any structure (value type in IL par
The AMD64 native calling conventions (Windows 64 and System V) require return buffer address to be returned by callee in RAX. JIT also follows this rule.
+## RISC-V only: structs passed/returned according to hardware floating-point calling convention
+
+Passing/returning structs according to hardware floating-point calling convention like native is currently [supported only up to 16 bytes](https://github.com/dotnet/runtime/issues/107386), ones larger than that differ from the standard ABI and are passed/returned according to integer calling convention (by implicit reference).
+
## Return buffers
The same applies to some return buffers. See `MethodTable::IsStructRequiringStackAllocRetBuf()`. When that returns `false`, the return buffer might be on the heap, either due to reflection/remoting code paths mentioned previously or due to a JIT optimization where a call with a return buffer that then assigns to a field (on the GC heap) are changed into passing the field reference as the return buffer. Conversely, when it returns true, the JIT does not need to use a write barrier when storing to the return buffer, but it is still not guaranteed to be a compiler temp, and as such the JIT should not introduce spurious writes to the return buffer.
@@ -117,6 +126,12 @@ ARM64-only: When a method returns a structure that is larger than 16 bytes the c
Primitive value types smaller than 32-bits are widened to 32-bits: signed small types are sign extended and unsigned small types are zero extended. This can be different from the standard calling conventions that may leave the state of unused bits in the return register undefined.
+## Small primitive arguments
+
+Small primitive arguments have undefined upper bits. This can be different from the standard calling conventions that may require normalization (e.g. on ARM32 and Apple ARM64).
+
+On RISC-V small primitive arguments are extended according to standard calling conventions.
+
# PInvokes
The convention is that any method with an InlinedCallFrame (either an IL stub or a normal method with an inlined PInvoke) saves/restores all non-volatile integer registers in its prolog/epilog respectively. This is done so that the InlinedCallFrame can just contain a return address, a stack pointer and a frame pointer. Then using just those three it can start a full stack walk using the normal RtlVirtualUnwind.
diff --git a/docs/design/datacontracts/EcmaMetadata.md b/docs/design/datacontracts/EcmaMetadata.md
new file mode 100644
index 00000000000..a5b445f10dc
--- /dev/null
+++ b/docs/design/datacontracts/EcmaMetadata.md
@@ -0,0 +1,340 @@
+# Contract EcmaMetadata
+
+This contract provides methods to get a view of the ECMA-335 metadata for a given module.
+
+## APIs of contract
+
+```csharp
+TargetSpan GetReadOnlyMetadataAddress(ModuleHandle handle);
+System.Reflection.Metadata.MetadataReader? GetMetadata(ModuleHandle handle);
+```
+
+Types from other contracts:
+
+| Type | Contract |
+|------|----------|
+| ModuleHandle | [Loader](./Loader.md#apis-of-contract) |
+
+## Version 1
+
+
+Data descriptors used:
+| Data Descriptor Name | Field | Meaning |
+| --- | --- | --- |
+| `Module` | `Base` | Pointer to start of PE file in memory |
+| `Module` | `DynamicMetadata` | Pointer to saved metadata for reflection emit modules |
+| `Module` | `FieldDefToDescMap` | Mapping table |
+| `DynamicMetadata` | `Size` | Size of the dynamic metadata blob (as a 32bit uint) |
+| `DynamicMetadata` | `Data` | Start of dynamic metadata data array |
+
+
+```csharp
+using System.IO;
+using System.Reflection.Metadata;
+using System.Runtime.InteropServices;
+
+TargetSpan GetReadOnlyMetadataAddress(ModuleHandle handle)
+{
+ TargetPointer baseAddress = Target.ReadPointer(handle.Address + /* Module::Base offset */);
+ if (baseAddress == TargetPointer.Null)
+ {
+ return default;
+ }
+
+ // Read CLR header per https://learn.microsoft.com/windows/win32/debug/pe-format
+ ulong clrHeaderRVA = ...
+
+ // Read Metadata per ECMA-335 II.25.3.3 CLI Header
+ ulong metadataDirectoryAddress = baseAddress + clrHeaderRVA + /* offset to Metadata */
+ int rva = Target.Read(metadataDirectoryAddress);
+ ulong size = Target.Read(metadataDirectoryAddress + sizeof(int));
+ return new(baseAddress + rva, size);
+}
+
+MetadataReader? GetMetadata(ModuleHandle handle)
+{
+ AvailableMetadataType type = GetAvailableMetadataType(handle);
+
+ switch (type)
+ {
+ case AvailableMetadataType.None:
+ return null;
+ case AvailableMetadataType.ReadOnly:
+ {
+ TargetSpan address = GetReadOnlyMetadataAddress(handle);
+ byte[] data = new byte[address.Size];
+ _target.ReadBuffer(address.Address, data);
+ return MetadataReaderProvider.FromMetadataImage(ImmutableCollectionsMarshal.AsImmutableArray(data)).GetMetadataReader();
+ }
+ case AvailableMetadataType.ReadWriteSavedCopy:
+ {
+ TargetSpan address = GetReadWriteSavedMetadataAddress(handle);
+ byte[] data = new byte[address.Size];
+ _target.ReadBuffer(address.Address, data);
+ return MetadataReaderProvider.FromMetadataImage(ImmutableCollectionsMarshal.AsImmutableArray(data)).GetMetadataReader();
+ }
+ case AvailableMetadataType.ReadWrite:
+ {
+ var targetEcmaMetadata = GetReadWriteMetadata(handle);
+
+ // From the multiple different target spans, we need to build a single
+ // contiguous ECMA-335 metadata blob.
+ BlobBuilder builder = new BlobBuilder();
+ builder.WriteUInt32(0x424A5342);
+
+ // major version
+ builder.WriteUInt16(1);
+
+ // minor version
+ builder.WriteUInt16(1);
+
+ // reserved
+ builder.WriteUInt32(0);
+
+ string version = targetEcmaMetadata.Schema.MetadataVersion;
+ builder.WriteInt32(AlignUp(version.Length, 4));
+ Write4ByteAlignedString(builder, version);
+
+ // reserved
+ builder.WriteUInt16(0);
+
+ // number of streams
+ ushort numStreams = 5; // #Strings, #US, #Blob, #GUID, #~ (metadata)
+ if (targetEcmaMetadata.Schema.VariableSizedColumnsAreAll4BytesLong)
+ {
+ // We direct MetadataReader to use 4-byte encoding for all variable-sized columns
+ // by providing the marker stream for a "minimal delta" image.
+ numStreams++;
+ }
+ builder.WriteUInt16(numStreams);
+
+ // Write Stream headers
+ if (targetEcmaMetadata.Schema.VariableSizedColumnsAreAll4BytesLong)
+ {
+ // Write the #JTD stream to indicate that all variable-sized columns are 4 bytes long.
+ WriteStreamHeader(builder, "#JTD", 0).WriteInt32(builder.Count);
+ }
+
+ BlobWriter stringsOffset = WriteStreamHeader(builder, "#Strings", (int)AlignUp(targetEcmaMetadata.StringHeap.Size, 4ul));
+ BlobWriter blobOffset = WriteStreamHeader(builder, "#Blob", (int)targetEcmaMetadata.BlobHeap.Size);
+ BlobWriter guidOffset = WriteStreamHeader(builder, "#GUID", (int)targetEcmaMetadata.GuidHeap.Size);
+ BlobWriter userStringOffset = WriteStreamHeader(builder, "#US", (int)targetEcmaMetadata.UserStringHeap.Size);
+
+ // We'll use the "uncompressed" tables stream name as the runtime may have created the *Ptr tables
+ // that are only present in the uncompressed tables stream.
+ BlobWriter tablesOffset = WriteStreamHeader(builder, "#-", 0);
+
+ // Write the heap-style Streams
+
+ stringsOffset.WriteInt32(builder.Count);
+ WriteTargetSpan(builder, targetEcmaMetadata.StringHeap);
+ for (ulong i = targetEcmaMetadata.StringHeap.Size; i < AlignUp(targetEcmaMetadata.StringHeap.Size, 4ul); i++)
+ {
+ builder.WriteByte(0);
+ }
+
+ blobOffset.WriteInt32(builder.Count);
+ WriteTargetSpan(builder, targetEcmaMetadata.BlobHeap);
+
+ guidOffset.WriteInt32(builder.Count);
+ WriteTargetSpan(builder, targetEcmaMetadata.GuidHeap);
+
+ userStringOffset.WriteInt32(builder.Count);
+ WriteTargetSpan(builder, targetEcmaMetadata.UserStringHeap);
+
+ // Write tables stream
+ tablesOffset.WriteInt32(builder.Count);
+
+ // Write tables stream header
+ builder.WriteInt32(0); // reserved
+ builder.WriteByte(2); // major version
+ builder.WriteByte(0); // minor version
+ uint heapSizes =
+ (targetEcmaMetadata.Schema.LargeStringHeap ? 1u << 0 : 0) |
+ (targetEcmaMetadata.Schema.LargeBlobHeap ? 1u << 1 : 0) |
+ (targetEcmaMetadata.Schema.LargeGuidHeap ? 1u << 2 : 0);
+
+ builder.WriteByte((byte)heapSizes);
+ builder.WriteByte(1); // reserved
+
+ ulong validTables = 0;
+ for (int i = 0; i < targetEcmaMetadata.Schema.RowCount.Length; i++)
+ {
+ if (targetEcmaMetadata.Schema.RowCount[i] != 0)
+ {
+ validTables |= 1ul << i;
+ }
+ }
+
+ ulong sortedTables = 0;
+ for (int i = 0; i < targetEcmaMetadata.Schema.IsSorted.Length; i++)
+ {
+ if (targetEcmaMetadata.Schema.IsSorted[i])
+ {
+ sortedTables |= 1ul << i;
+ }
+ }
+
+ builder.WriteUInt64(validTables);
+ builder.WriteUInt64(sortedTables);
+
+ foreach (int rowCount in targetEcmaMetadata.Schema.RowCount)
+ {
+ if (rowCount > 0)
+ {
+ builder.WriteInt32(rowCount);
+ }
+ }
+
+ // Write the tables
+ foreach (TargetSpan span in targetEcmaMetadata.Tables)
+ {
+ WriteTargetSpan(builder, span);
+ }
+
+ MemoryStream metadataStream = new MemoryStream();
+ builder.WriteContentTo(metadataStream);
+ return MetadataReaderProvider.FromMetadataStream(metadataStream).GetMetadataReader();
+
+ void WriteTargetSpan(BlobBuilder builder, TargetSpan span)
+ {
+ Blob blob = builder.ReserveBytes(checked((int)span.Size));
+ _target.ReadBuffer(span.Address, blob.GetBytes().AsSpan());
+ }
+
+ static BlobWriter WriteStreamHeader(BlobBuilder builder, string name, int size)
+ {
+ BlobWriter offset = new(builder.ReserveBytes(4));
+ builder.WriteInt32(size);
+ Write4ByteAlignedString(builder, name);
+ return offset;
+ }
+
+ static void Write4ByteAlignedString(BlobBuilder builder, string value)
+ {
+ int bufferStart = builder.Count;
+ builder.WriteUTF8(value);
+ builder.WriteByte(0);
+ int stringEnd = builder.Count;
+ for (int i = stringEnd; i < bufferStart + AlignUp(value.Length, 4); i++)
+ {
+ builder.WriteByte(0);
+ }
+ }
+ }
+ }
+}
+```
+
+### Helper Methods
+
+``` csharp
+using System;
+using System.Numerics;
+
+struct EcmaMetadataSchema
+{
+ public EcmaMetadataSchema(string metadataVersion, bool largeStringHeap, bool largeBlobHeap, bool largeGuidHeap, int[] rowCount, bool[] isSorted, bool variableSizedColumnsAre4BytesLong)
+ {
+ MetadataVersion = metadataVersion;
+ LargeStringHeap = largeStringHeap;
+ LargeBlobHeap = largeBlobHeap;
+ LargeGuidHeap = largeGuidHeap;
+
+ _rowCount = rowCount;
+ _isSorted = isSorted;
+
+ VariableSizedColumnsAreAll4BytesLong = variableSizedColumnsAre4BytesLong;
+ }
+
+ public readonly string MetadataVersion;
+
+ public readonly bool LargeStringHeap;
+ public readonly bool LargeBlobHeap;
+ public readonly bool LargeGuidHeap;
+
+ // Table data, these structures hold MetadataTable.Count entries
+ private readonly int[] _rowCount;
+ public readonly ReadOnlySpan<int> RowCount => _rowCount;
+
+ private readonly bool[] _isSorted;
+ public readonly ReadOnlySpan<bool> IsSorted => _isSorted;
+
+ // In certain scenarios the size of the tables is forced to be the maximum size
+ // Otherwise the size of columns should be computed based on RowSize/the various heap flags
+ public readonly bool VariableSizedColumnsAreAll4BytesLong;
+}
+
+class TargetEcmaMetadata
+{
+ public TargetEcmaMetadata(EcmaMetadataSchema schema,
+ TargetSpan[] tables,
+ TargetSpan stringHeap,
+ TargetSpan userStringHeap,
+ TargetSpan blobHeap,
+ TargetSpan guidHeap)
+ {
+ Schema = schema;
+ _tables = tables;
+ StringHeap = stringHeap;
+ UserStringHeap = userStringHeap;
+ BlobHeap = blobHeap;
+ GuidHeap = guidHeap;
+ }
+
+ public EcmaMetadataSchema Schema { get; init; }
+
+ private TargetSpan[] _tables;
+ public ReadOnlySpan<TargetSpan> Tables => _tables;
+ public TargetSpan StringHeap { get; init; }
+ public TargetSpan UserStringHeap { get; init; }
+ public TargetSpan BlobHeap { get; init; }
+ public TargetSpan GuidHeap { get; init; }
+}
+
+[Flags]
+enum AvailableMetadataType
+{
+ None = 0,
+ ReadOnly = 1,
+ ReadWriteSavedCopy = 2,
+ ReadWrite = 4
+}
+
+AvailableMetadataType GetAvailableMetadataType(ModuleHandle handle)
+{
+ Data.Module module = new Data.Module(Target, handle.Address);
+
+ AvailableMetadataType flags = AvailableMetadataType.None;
+
+ TargetPointer dynamicMetadata = Target.ReadPointer(handle.Address + /* Module::DynamicMetadata offset */);
+
+ if (dynamicMetadata != TargetPointer.Null)
+ flags |= AvailableMetadataType.ReadWriteSavedCopy;
+ else
+ flags |= AvailableMetadataType.ReadOnly;
+
+ return flags;
+}
+
+TargetSpan GetReadWriteSavedMetadataAddress(ModuleHandle handle)
+{
+ Data.Module module = new Data.Module(Target, handle.Address);
+ TargetPointer dynamicMetadata = Target.ReadPointer(handle.Address + /* Module::DynamicMetadata offset */);
+
+ ulong size = Target.Read(handle.Address + /* DynamicMetadata::Size offset */);
+ TargetPointer result = handle.Address + /* DynamicMetadata::Data offset */;
+ return new(result, size);
+}
+
+TargetEcmaMetadata GetReadWriteMetadata(ModuleHandle handle)
+{
+ // [cdac] TODO.
+}
+
+T AlignUp<T>(T input, T alignment)
+    where T : IBinaryInteger<T>
+{
+ return input + (alignment - T.One) & ~(alignment - T.One);
+}
+```
diff --git a/docs/design/datacontracts/Loader.md b/docs/design/datacontracts/Loader.md
index bc45eaee7a6..d0fe390b5ad 100644
--- a/docs/design/datacontracts/Loader.md
+++ b/docs/design/datacontracts/Loader.md
@@ -25,89 +25,18 @@ record struct ModuleLookupTables(
TargetPointer MemberRefToDesc,
TargetPointer MethodDefToDesc,
TargetPointer TypeDefToMethodTable,
- TargetPointer TypeRefToMethodTable);
-
-internal struct EcmaMetadataSchema
-{
- public EcmaMetadataSchema(string metadataVersion, bool largeStringHeap, bool largeBlobHeap, bool largeGuidHeap, int[] rowCount, bool[] isSorted, bool variableSizedColumnsAre4BytesLong)
- {
- MetadataVersion = metadataVersion;
- LargeStringHeap = largeStringHeap;
- LargeBlobHeap = largeBlobHeap;
- LargeGuidHeap = largeGuidHeap;
-
- _rowCount = rowCount;
- _isSorted = isSorted;
-
- VariableSizedColumnsAreAll4BytesLong = variableSizedColumnsAre4BytesLong;
- }
-
- public readonly string MetadataVersion;
-
- public readonly bool LargeStringHeap;
- public readonly bool LargeBlobHeap;
- public readonly bool LargeGuidHeap;
-
- // Table data, these structures hold MetadataTable.Count entries
- private readonly int[] _rowCount;
- public readonly ReadOnlySpan<int> RowCount => _rowCount;
-
- private readonly bool[] _isSorted;
- public readonly ReadOnlySpan<bool> IsSorted => _isSorted;
-
- // In certain scenarios the size of the tables is forced to be the maximum size
- // Otherwise the size of columns should be computed based on RowSize/the various heap flags
- public readonly bool VariableSizedColumnsAreAll4BytesLong;
-}
-
-internal class TargetEcmaMetadata
-{
- public TargetEcmaMetadata(EcmaMetadataSchema schema,
- TargetSpan[] tables,
- TargetSpan stringHeap,
- TargetSpan userStringHeap,
- TargetSpan blobHeap,
- TargetSpan guidHeap)
- {
- Schema = schema;
- _tables = tables;
- StringHeap = stringHeap;
- UserStringHeap = userStringHeap;
- BlobHeap = blobHeap;
- GuidHeap = guidHeap;
- }
-
- public EcmaMetadataSchema Schema { get; init; }
-
- private TargetSpan[] _tables;
- public ReadOnlySpan<TargetSpan> Tables => _tables;
- public TargetSpan StringHeap { get; init; }
- public TargetSpan UserStringHeap { get; init; }
- public TargetSpan BlobHeap { get; init; }
- public TargetSpan GuidHeap { get; init; }
-}
-
-[Flags]
-internal enum AvailableMetadataType
-{
- None = 0,
- ReadOnly = 1,
- ReadWriteSavedCopy = 2,
- ReadWrite = 4
-}
+ TargetPointer TypeRefToMethodTable,
+ TargetPointer MethodDefToILCodeVersioningState);
```
``` csharp
-ModuleHandle GetModuleHandle(TargetPointer);
+ModuleHandle GetModuleHandle(TargetPointer module);
TargetPointer GetAssembly(ModuleHandle handle);
ModuleFlags GetFlags(ModuleHandle handle);
+string GetPath(ModuleHandle handle);
TargetPointer GetLoaderAllocator(ModuleHandle handle);
TargetPointer GetThunkHeap(ModuleHandle handle);
TargetPointer GetILBase(ModuleHandle handle);
-TargetPointer GetMetadataAddress(ModuleHandle handle, out ulong size);
-AvailableMetadataType GetAvailableMetadataType(ModuleHandle handle);
-TargetPointer GetReadWriteSavedMetadataAddress(ModuleHandle handle, out ulong size);
-TargetEcmaMetadata GetReadWriteMetadata(ModuleHandle handle);
ModuleLookupTables GetLookupTables(ModuleHandle handle);
```
@@ -121,15 +50,14 @@ Data descriptors used:
| `Module` | `Flags` | Assembly of the Module |
| `Module` | `LoaderAllocator` | LoaderAllocator of the Module |
| `Module` | `ThunkHeap` | Pointer to the thunk heap |
-| `Module` | `DynamicMetadata` | Pointer to saved metadata for reflection emit modules |
+| `Module` | `Path` | Path of the Module (UTF-16, null-terminated) |
| `Module` | `FieldDefToDescMap` | Mapping table |
| `Module` | `ManifestModuleReferencesMap` | Mapping table |
| `Module` | `MemberRefToDescMap` | Mapping table |
| `Module` | `MethodDefToDescMap` | Mapping table |
| `Module` | `TypeDefToMethodTableMap` | Mapping table |
| `Module` | `TypeRefToMethodTableMap` | Mapping table |
-| `DynamicMetadata` | `Size` | Size of the dynamic metadata blob (as a 32bit uint) |
-| `DynamicMetadata` | `Data` | Start of dynamic metadata data array |
+| `ModuleLookupMap` | `TableData` | Start of the mapping table's data |
``` csharp
ModuleHandle GetModuleHandle(TargetPointer modulePointer)
@@ -147,6 +75,13 @@ ModuleFlags GetFlags(ModuleHandle handle)
return target.Read(handle.Address + /* Module::Flags offset */);
}
+string GetPath(ModuleHandle handle)
+{
+ TargetPointer pathStart = target.ReadPointer(handle.Address + /* Module::Path offset */);
+ char[] path = // Read from target starting at pathStart until null terminator
+ return new string(path);
+}
+
TargetPointer GetLoaderAllocator(ModuleHandle handle)
{
return target.ReadPointer(handle.Address + /* Module::LoaderAllocator offset */);
@@ -162,51 +97,6 @@ TargetPointer GetILBase(ModuleHandle handle)
return target.ReadPointer(handle.Address + /* Module::Base offset */);
}
-TargetPointer GetMetadataAddress(ModuleHandle handle, out ulong size)
-{
- TargetPointer baseAddress = GetILBase(handle);
- if (baseAddress == TargetPointer.Null)
- {
- size = 0;
- return TargetPointer.Null;
- }
-
- // Read CLR header per https://learn.microsoft.com/windows/win32/debug/pe-format
- ulong clrHeaderRVA = ...
-
- // Read Metadata per ECMA-335 II.25.3.3 CLI Header
- ulong metadataDirectoryAddress = baseAddress + clrHeaderRva + /* offset to Metadata */
- int rva = target.Read(metadataDirectoryAddress);
- size = target.Read(metadataDirectoryAddress + sizeof(int));
- return baseAddress + rva;
-}
-
-AvailableMetadataType ILoader.GetAvailableMetadataType(ModuleHandle handle)
-{
- Data.Module module = _target.ProcessedData.GetOrAdd(handle.Address);
-
- AvailableMetadataType flags = AvailableMetadataType.None;
-
- TargetPointer dynamicMetadata = target.ReadPointer(handle.Address + /* Module::DynamicMetadata offset */);
-
- if (dynamicMetadata != TargetPointer.Null)
- flags |= AvailableMetadataType.ReadWriteSavedCopy;
- else
- flags |= AvailableMetadataType.ReadOnly;
-
- return flags;
-}
-
-TargetPointer ILoader.GetReadWriteSavedMetadataAddress(ModuleHandle handle, out ulong size)
-{
- Data.Module module = _target.ProcessedData.GetOrAdd(handle.Address);
- TargetPointer dynamicMetadata = target.ReadPointer(handle.Address + /* Module::DynamicMetadata offset */);
-
- size = target.Read(handle.Address + /* DynamicMetadata::Size offset */);
- TargetPointer result = handle.Address + /* DynamicMetadata::Data offset */;
- return result;
-}
-
ModuleLookupTables GetLookupTables(ModuleHandle handle)
{
return new ModuleLookupTables(
@@ -215,6 +105,8 @@ ModuleLookupTables GetLookupTables(ModuleHandle handle)
MemberRefToDescMap: target.ReadPointer(handle.Address + /* Module::MemberRefToDescMap */),
MethodDefToDescMap: target.ReadPointer(handle.Address + /* Module::MethodDefToDescMap */),
TypeDefToMethodTableMap: target.ReadPointer(handle.Address + /* Module::TypeDefToMethodTableMap */),
- TypeRefToMethodTableMap: target.ReadPointer(handle.Address + /* Module::TypeRefToMethodTableMap */));
+ TypeRefToMethodTableMap: target.ReadPointer(handle.Address + /* Module::TypeRefToMethodTableMap */),
+ MethodDefToILCodeVersioningState: target.ReadPointer(handle.Address + /*
+ Module::MethodDefToILCodeVersioningState */));
}
```
diff --git a/docs/design/datacontracts/RuntimeTypeSystem.md b/docs/design/datacontracts/RuntimeTypeSystem.md
index 5693ab205ce..8accd2e6dc6 100644
--- a/docs/design/datacontracts/RuntimeTypeSystem.md
+++ b/docs/design/datacontracts/RuntimeTypeSystem.md
@@ -23,6 +23,7 @@ internal enum CorElementType
// Values defined in ECMA-335 - II.23.1.16 Element types used in signatures
// +
Internal = 0x21, // Indicates that the next pointer sized number of bytes is the address of a TypeHandle. Signatures that contain the Internal CorElementType cannot exist in metadata that is saved into a serialized format.
+ CModInternal = 0x22, // Indicates that the next byte specifies if the modifier is required and the next pointer sized number of bytes after that is the address of a TypeHandle. Signatures that contain the CModInternal CorElementType cannot exist in metadata that is saved into a serialized format.
}
```
@@ -92,12 +93,51 @@ struct MethodDescHandle
}
```
+```csharp
+public enum ArrayFunctionType
+{
+ Get = 0,
+ Set = 1,
+ Address = 2,
+ Constructor = 3
+}
+```
+
```csharp
partial interface IRuntimeTypeSystem : IContract
{
public virtual MethodDescHandle GetMethodDescHandle(TargetPointer methodDescPointer);
public virtual TargetPointer GetMethodTable(MethodDescHandle methodDesc);
+
+ // Return true for an uninstantiated generic method
+ public virtual bool IsGenericMethodDefinition(MethodDescHandle methodDesc);
+
+ public virtual ReadOnlySpan<TypeHandle> GetGenericMethodInstantiation(MethodDescHandle methodDesc);
+
+ // Return mdTokenNil (0x06000000) if the method doesn't have a token, otherwise return the token of the method
+ public virtual uint GetMethodToken(MethodDescHandle methodDesc);
+
+ // Return true if a MethodDesc represents an array method
+ // An array method is also a StoredSigMethodDesc
+ public virtual bool IsArrayMethod(MethodDescHandle methodDesc, out ArrayFunctionType functionType);
+
+ // Return true if a MethodDesc represents a method without metadata, either an IL Stub dynamically
+ // generated by the runtime, or a MethodDesc that describes a method represented by the System.Reflection.Emit.DynamicMethod class
+ // Or something else similar.
+ // A no metadata method is also a StoredSigMethodDesc
+ public virtual bool IsNoMetadataMethod(MethodDescHandle methodDesc, out string methodName);
+
+ // A StoredSigMethodDesc is a MethodDesc for which the signature isn't found in metadata.
+ public virtual bool IsStoredSigMethodDesc(MethodDescHandle methodDesc, out ReadOnlySpan<byte> signature);
+
+ // Return true for a MethodDesc that describes a method represented by the System.Reflection.Emit.DynamicMethod class
+ // A DynamicMethod is also a StoredSigMethodDesc, and a NoMetadataMethod
+ public virtual bool IsDynamicMethod(MethodDescHandle methodDesc);
+
+ // Return true if a MethodDesc represents an IL Stub dynamically generated by the runtime
+ // A IL Stub method is also a StoredSigMethodDesc, and a NoMetadataMethod
+ public virtual bool IsILStub(MethodDescHandle methodDesc);
}
```
@@ -293,6 +333,7 @@ The contract additionally depends on these data descriptors
| `FnPtrTypeDesc` | `NumArgs` | Number of arguments to the function described by the `TypeDesc` |
| `FnPtrTypeDesc` | `CallConv` | Lower 8 bits is the calling convention bit as extracted by the signature that defines this `TypeDesc` |
| `FnPtrTypeDesc` | `RetAndArgTypes` | Pointer to an array of TypeHandle addresses. This length of this is 1 more than `NumArgs` |
+| `GenericsDictInfo` | `NumDicts` | Number of instantiation dictionaries, including inherited ones, in this `GenericsDictInfo` |
| `GenericsDictInfo` | `NumTypeArgs` | Number of type arguments in the type or method instantiation described by this `GenericsDictInfo` |
@@ -563,7 +604,8 @@ The version 1 `MethodDesc` APIs depend on the `MethodDescAlignment` global and t
| Global name | Meaning |
| --- | --- |
-| `MethodDescAlignment` | `MethodDescChunk` trailing data is allocated in multiples of this constant. The size (in bytes) of each `MethodDesc` (or subclass) instance is a multiple of this constant.
+| `MethodDescAlignment` | `MethodDescChunk` trailing data is allocated in multiples of this constant. The size (in bytes) of each `MethodDesc` (or subclass) instance is a multiple of this constant. |
+| `MethodDescTokenRemainderBitCount` | Number of bits in the token remainder in `MethodDesc` |
In the runtime a `MethodDesc` implicitly belongs to a single `MethodDescChunk` and some common data is shared between method descriptors that belong to the same chunk. A single method table
@@ -572,12 +614,211 @@ will typically have multiple chunks. There are subkinds of MethodDescs at runti
We depend on the following data descriptors:
| Data Descriptor Name | Field | Meaning |
| --- | --- | --- |
-| `MethodDesc` | `ChunkIndex` | Offset of this `MethodDesc` relative to the end of its containing `MethodDescChunk` - in multiples of `MethodDescAlignment`
-| `MethodDesc` | `Slot` | The method's slot
-| `MethodDesc` | `Flags` | The method's flags
-| `MethodDescChunk` | `MethodTable` | The method table set of methods belongs to
-| `MethodDescChunk` | `Next` | The next chunk of methods
-| `MethodDescChunk` | `Size` | The size of this `MethodDescChunk` following this `MethodDescChunk` header, minus 1. In multiples of `MethodDescAlignment`
-| `MethodDescChunk` | `Count` | The number of `MethodDesc` entries in this chunk, minus 1.
+| `MethodDesc` | `ChunkIndex` | Offset of this `MethodDesc` relative to the end of its containing `MethodDescChunk` - in multiples of `MethodDescAlignment` |
+| `MethodDesc` | `Slot` | The method's slot |
+| `MethodDesc` | `Flags` | The method's flags |
+| `MethodDesc` | `Flags3AndTokenRemainder` | More flags for the method, and the low bits of the method's token's RID |
+| `MethodDescChunk` | `MethodTable` | The method table set of methods belongs to |
+| `MethodDescChunk` | `Next` | The next chunk of methods |
+| `MethodDescChunk` | `Size` | The size of this `MethodDescChunk` following this `MethodDescChunk` header, minus 1. In multiples of `MethodDescAlignment` |
+| `MethodDescChunk` | `Count` | The number of `MethodDesc` entries in this chunk, minus 1. |
+| `MethodDescChunk` | `FlagsAndTokenRange` | `MethodDescChunk` flags, and the upper bits of the method token's RID |
+| `InstantiatedMethodDesc` | `PerInstInfo` | The pointer to the method's type arguments |
+| `InstantiatedMethodDesc` | `Flags2` | Flags for the `InstantiatedMethodDesc` |
+| `InstantiatedMethodDesc` | `NumGenericArgs` | How many generic args the method has |
+| `StoredSigMethodDesc` | `Sig` | Pointer to a metadata signature |
+| `StoredSigMethodDesc` | `cSig` | Count of bytes in the metadata signature |
+| `StoredSigMethodDesc` | `ExtendedFlags` | Flags field for the `StoredSigMethodDesc` |
+| `DynamicMethodDesc` | `MethodName` | Pointer to Null-terminated UTF8 string describing the Method desc |
+
+
+And the following enumeration definitions
+
+```csharp
+ internal enum MethodDescClassification
+ {
+ IL = 0, // IL
+ FCall = 1, // FCall (also includes tlbimped ctor, Delegate ctor)
+ PInvoke = 2, // PInvoke method
+ EEImpl = 3, // special method; implementation provided by EE (like Delegate Invoke)
+ Array = 4, // Array ECall
+ Instantiated = 5, // Instantiated generic methods, including descriptors
+ // for both shared and unshared code (see InstantiatedMethodDesc)
+ ComInterop = 6,
+ Dynamic = 7, // for method desc with no metadata behind
+ }
+
+ [Flags]
+ internal enum MethodDescFlags : ushort
+ {
+ ClassificationMask = 0x7,
+ HasNonVtableSlot = 0x0008,
+ }
+
+ internal enum InstantiatedMethodDescFlags2 : ushort
+ {
+ KindMask = 0x07,
+ GenericMethodDefinition = 0x01,
+ UnsharedMethodInstantiation = 0x02,
+ SharedMethodInstantiation = 0x03,
+ WrapperStubWithInstantiations = 0x04,
+ }
+
+ [Flags]
+ internal enum DynamicMethodDescExtendedFlags : uint
+ {
+ IsLCGMethod = 0x00004000,
+ IsILStub = 0x00008000,
+ }
+```
+
+
+And the various apis are implemented with the following algorithms
+
+```csharp
+ public bool IsGenericMethodDefinition(MethodDescHandle methodDescHandle)
+ {
+ MethodDesc methodDesc = _methodDescs[methodDescHandle.Address];
+
+ if (methodDesc.Classification != MethodDescClassification.Instantiated)
+ return false;
+
+ ushort Flags2 = // Read Flags2 field from InstantiatedMethodDesc contract using address methodDescHandle.Address
+
+ return ((int)Flags2 & (int)InstantiatedMethodDescFlags2.KindMask) == (int)InstantiatedMethodDescFlags2.GenericMethodDefinition;
+ }
+
+ public ReadOnlySpan<TypeHandle> GetGenericMethodInstantiation(MethodDescHandle methodDescHandle)
+ {
+ MethodDesc methodDesc = _methodDescs[methodDescHandle.Address];
+
+ if (methodDesc.Classification != MethodDescClassification.Instantiated)
+ return default;
+
+ TargetPointer dictionaryPointer = // Read PerInstInfo field from InstantiatedMethodDesc contract using address methodDescHandle.Address
+ if (dictionaryPointer == 0)
+ return default;
+
+ int NumTypeArgs = // Read NumGenericArgs from methodDescHandle.Address using InstantiatedMethodDesc contract
+ TypeHandle[] instantiation = new TypeHandle[NumTypeArgs];
+ for (int i = 0; i < NumTypeArgs; i++)
+ instantiation[i] = GetTypeHandle(_target.ReadPointer(dictionaryPointer + _target.PointerSize * i));
+
+ return instantiation;
+ }
+
+ public uint GetMethodToken(MethodDescHandle methodDescHandle)
+ {
+ MethodDesc methodDesc = _methodDescs[methodDescHandle.Address];
+
+ TargetPointer methodDescChunk = // Using ChunkIndex from methodDesc, compute the wrapping MethodDescChunk
+
+ ushort Flags3AndTokenRemainder = // Read Flags3AndTokenRemainder field from MethodDesc contract using address methodDescHandle.Address
+ ushort FlagsAndTokenRange = // Read FlagsAndTokenRange field from MethodDescChunk contract using address methodDescChunk
+
+ int tokenRemainderBitCount = _target.ReadGlobal(Constants.Globals.MethodDescTokenRemainderBitCount);
+ int tokenRangeBitCount = 24 - tokenRemainderBitCount;
+ uint allRidBitsSet = 0xFFFFFF;
+ uint tokenRemainderMask = allRidBitsSet >> tokenRangeBitCount;
+ uint tokenRangeMask = allRidBitsSet >> tokenRemainderBitCount;
+
+ uint tokenRemainder = (uint)(Flags3AndTokenRemainder & tokenRemainderMask);
+ uint tokenRange = ((uint)(FlagsAndTokenRange & tokenRangeMask)) << tokenRemainderBitCount;
+
+ return 0x06000000 | tokenRange | tokenRemainder;
+ }
+
+ public bool IsArrayMethod(MethodDescHandle methodDescHandle, out ArrayFunctionType functionType)
+ {
+ MethodDesc methodDesc = _methodDescs[methodDescHandle.Address];
+
+ if (methodDesc.Classification != MethodDescClassification.Array)
+ {
+ functionType = default;
+ return false;
+ }
+
+ int arrayMethodIndex = methodDesc.Slot - GetNumVtableSlots(GetTypeHandle(methodDesc.MethodTable));
+
+ functionType = arrayMethodIndex switch
+ {
+ 0 => ArrayFunctionType.Get,
+ 1 => ArrayFunctionType.Set,
+ 2 => ArrayFunctionType.Address,
+ >= 3 => ArrayFunctionType.Constructor,
+ _ => throw new InvalidOperationException()
+ };
+
+ return true;
+ }
+
+ public bool IsNoMetadataMethod(MethodDescHandle methodDescHandle, out string methodName)
+ {
+ MethodDesc methodDesc = _methodDescs[methodDescHandle.Address];
+
+ if (methodDesc.Classification != MethodDescClassification.Dynamic)
+ {
+ methodName = default;
+ return false;
+ }
+
+ TargetPointer methodNamePointer = // Read MethodName field from DynamicMethodDesc contract using address methodDescHandle.Address
+
+ methodName = // ReadBuffer from target of a utf8 null terminated string, starting at address methodNamePointer
+ return true;
+ }
+
+ public bool IsStoredSigMethodDesc(MethodDescHandle methodDescHandle, out ReadOnlySpan<byte> signature)
+ {
+ MethodDesc methodDesc = _methodDescs[methodDescHandle.Address];
+
+ switch (methodDesc.Classification)
+ {
+ case MethodDescClassification.Dynamic:
+ case MethodDescClassification.EEImpl:
+ case MethodDescClassification.Array:
+ break; // These have stored sigs
+
+ default:
+ signature = default;
+ return false;
+ }
+
+ TargetPointer Sig = // Read Sig field from StoredSigMethodDesc contract using address methodDescHandle.Address
+ uint cSig = // Read cSig field from StoredSigMethodDesc contract using address methodDescHandle.Address
+
+ TargetPointer methodNamePointer = // Read S field from DynamicMethodDesc contract using address methodDescHandle.Address
+ signature = // Read buffer from target memory starting at address Sig, with cSig bytes in it.
+ return true;
+ }
+
+ public bool IsDynamicMethod(MethodDescHandle methodDescHandle)
+ {
+ MethodDesc methodDesc = _methodDescs[methodDescHandle.Address];
+
+ if (methodDesc.Classification != MethodDescClassification.Dynamic)
+ {
+ return false;
+ }
+
+ uint ExtendedFlags = // Read ExtendedFlags field from StoredSigMethodDesc contract using address methodDescHandle.Address
+
+ return ((DynamicMethodDescExtendedFlags)ExtendedFlags).HasFlag(DynamicMethodDescExtendedFlags.IsLCGMethod);
+ }
+
+ public bool IsILStub(MethodDescHandle methodDescHandle)
+ {
+ MethodDesc methodDesc = _methodDescs[methodDescHandle.Address];
+
+ if (methodDesc.Classification != MethodDescClassification.Dynamic)
+ {
+ return false;
+ }
+
+ uint ExtendedFlags = // Read ExtendedFlags field from StoredSigMethodDesc contract using address methodDescHandle.Address
+
+ return ((DynamicMethodDescExtendedFlags)ExtendedFlags).HasFlag(DynamicMethodDescExtendedFlags.IsILStub);
+ }
+```
**TODO(cdac)**
diff --git a/docs/design/datacontracts/contract_csharp_api_design.cs b/docs/design/datacontracts/contract_csharp_api_design.cs
index a770c1f4576..272645ec482 100644
--- a/docs/design/datacontracts/contract_csharp_api_design.cs
+++ b/docs/design/datacontracts/contract_csharp_api_design.cs
@@ -37,6 +37,18 @@ struct TargetPointer
// Add a full set of operators to support pointer arithmetic
}
+ public readonly struct TargetSpan
+ {
+ public TargetSpan(TargetPointer address, ulong size)
+ {
+ Address = address;
+ Size = size;
+ }
+
+ public TargetPointer Address { get; }
+ public ulong Size { get; }
+ }
+
struct TargetNInt
{
public long Value;
diff --git a/docs/design/features/globalization-hybrid-mode.md b/docs/design/features/globalization-hybrid-mode.md
index 2270fe897c1..bdbdb27b09e 100644
--- a/docs/design/features/globalization-hybrid-mode.md
+++ b/docs/design/features/globalization-hybrid-mode.md
@@ -270,13 +270,12 @@ Dependencies:
Web API does not expose locale-sensitive endsWith/startsWith function. As a workaround, both strings get normalized and weightless characters are removed. Resulting strings are cut to the same length and comparison is performed. This approach, beyond having the same compare option limitations as described under **String comparison**, has additional limitations connected with the workaround used. Because we are normalizing strings to be able to cut them, we cannot calculate the match length on the original strings. Methods that calculate this information throw PlatformNotSupported exception:
-- [CompareInfo.IsPrefix](https://learn.microsoft.com/dotnet/api/system.globalization.compareinfo.isprefix?view=net-8.0#system-globalization-compareinfo-isprefix(system-readonlyspan((system-char))-system-readonlyspan((system-char))-system-globalization-compareoptions-system-int32@))
-- [CompareInfo.IsSuffix](https://learn.microsoft.com/dotnet/api/system.globalization.compareinfo.issuffix?view=net-8.0#system-globalization-compareinfo-issuffix(system-readonlyspan((system-char))-system-readonlyspan((system-char))-system-globalization-compareoptions-system-int32@))
+- [CompareInfo.IsPrefix](https://learn.microsoft.com/dotnet/api/system.globalization.compareinfo.isprefix?view=#system-globalization-compareinfo-isprefix(system-readonlyspan((system-char))-system-readonlyspan((system-char))-system-globalization-compareoptions-system-int32@))
+- [CompareInfo.IsSuffix](https://learn.microsoft.com/dotnet/api/system.globalization.compareinfo.issuffix?view=#system-globalization-compareinfo-issuffix(system-readonlyspan((system-char))-system-readonlyspan((system-char))-system-globalization-compareoptions-system-int32@))
- `IgnoreSymbols`
Only comparisons that do not skip character types are allowed. E.g. `IgnoreSymbols` skips symbol-chars in comparison/indexing. All `CompareOptions` combinations that include `IgnoreSymbols` throw `PlatformNotSupportedException`.
-
**String indexing**
Affected public APIs:
@@ -287,6 +286,15 @@ Affected public APIs:
Web API does not expose locale-sensitive indexing function. There is a discussion on adding it: https://github.com/tc39/ecma402/issues/506. In the current state, as a workaround, locale-sensitive string segmenter combined with locale-sensitive comparison is used. This approach, beyond having the same compare option limitations as described under **String comparison**, has additional limitations connected with the workaround used. Information about additional limitations:
+- Methods that calculate `matchLength` always throw PlatformNotSupported exception:
+
+[CompareInfo.IndexOf](https://learn.microsoft.com/en-us/dotnet/api/system.globalization.compareinfo.indexof?view=#system-globalization-compareinfo-indexof(system-readonlyspan((system-char))-system-readonlyspan((system-char))-system-globalization-compareoptions-system-int32@))
+
+[CompareInfo.LastIndexOf](https://learn.microsoft.com/en-us/dotnet/api/system.globalization.compareinfo.lastindexof?view=#system-globalization-compareinfo-lastindexof(system-readonlyspan((system-char))-system-readonlyspan((system-char))-system-globalization-compareoptions-system-int32@))
+
+- String.Replace that uses `StringComparison` argument relies internally on IndexOf with `matchLength` argument. For this reason, it throws PlatformNotSupportedException:
+[String.Replace](https://learn.microsoft.com/en-us/dotnet/api/system.string.replace?view=#system-string-replace(system-string-system-string-system-stringcomparison))
+
- Support depends on [`Intl.segmenter's support`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Intl/Segmenter#browser_compatibility).
- `IgnoreSymbols`
@@ -463,6 +471,11 @@ Affected public APIs:
- String.IndexOf
- String.LastIndexOf
+Methods that calculate `matchLength` throw PlatformNotSupported exception:
+[CompareInfo.IndexOf](https://learn.microsoft.com/en-us/dotnet/api/system.globalization.compareinfo.indexof?view=#system-globalization-compareinfo-indexof(system-readonlyspan((system-char))-system-readonlyspan((system-char))-system-globalization-compareoptions-system-int32@))
+
+[CompareInfo.LastIndexOf](https://learn.microsoft.com/en-us/dotnet/api/system.globalization.compareinfo.lastindexof?view=#system-globalization-compareinfo-lastindexof(system-readonlyspan((system-char))-system-readonlyspan((system-char))-system-globalization-compareoptions-system-int32@))
+
Mapped to Apple Native API `rangeOfString:options:range:locale:`(https://developer.apple.com/documentation/foundation/nsstring/1417348-rangeofstring?language=objc)
In `rangeOfString:options:range:locale:` objects are compared by checking the Unicode canonical equivalence of their code point sequences.
diff --git a/docs/project/list-of-diagnostics.md b/docs/project/list-of-diagnostics.md
index da1639e0d11..a9dee40efdc 100644
--- a/docs/project/list-of-diagnostics.md
+++ b/docs/project/list-of-diagnostics.md
@@ -263,7 +263,7 @@ The diagnostic id values reserved for .NET Libraries analyzer warnings are `SYSL
| __`SYSLIB1222`__ | Constructor annotated with JsonConstructorAttribute is inaccessible. |
| __`SYSLIB1223`__ | Attributes deriving from JsonConverterAttribute are not supported by the source generator. |
| __`SYSLIB1224`__ | Types annotated with JsonSerializableAttribute must be classes deriving from JsonSerializerContext. |
-| __`SYSLIB1225`__ | *`SYSLIB1220`-`SYSLIB1229` reserved for System.Text.Json.SourceGeneration.* |
+| __`SYSLIB1225`__ | Type includes ref like property, field or constructor parameter. |
| __`SYSLIB1226`__ | *`SYSLIB1220`-`SYSLIB1229` reserved for System.Text.Json.SourceGeneration.* |
| __`SYSLIB1227`__ | *`SYSLIB1220`-`SYSLIB1229` reserved for System.Text.Json.SourceGeneration.* |
| __`SYSLIB1228`__ | *`SYSLIB1220`-`SYSLIB1229` reserved for System.Text.Json.SourceGeneration.* |
@@ -310,3 +310,4 @@ Diagnostic id values for experimental APIs must not be recycled, as that could s
| __`SYSLIB5002`__ | .NET 9 | TBD | `SystemColors` alternate colors are experimental in .NET 9 |
| __`SYSLIB5003`__ | .NET 9 | TBD | `System.Runtime.Intrinsics.Arm.Sve` is experimental in .NET 9 |
| __`SYSLIB5004`__ | .NET 9 | TBD | `X86Base.DivRem` is experimental in .NET 9 since performance is not as optimized as `T.DivRem` |
+| __`SYSLIB5005`__ | .NET 9 | TBD | `System.Formats.Nrbf` is experimental in .NET 9 |
diff --git a/docs/tools/illink/data-formats.md b/docs/tools/illink/data-formats.md
index e360a914b14..5c0dc0ac478 100644
--- a/docs/tools/illink/data-formats.md
+++ b/docs/tools/illink/data-formats.md
@@ -382,7 +382,7 @@ This allows to add a custom attribute to a class, interface, delegate, struct or
- DefaultConstructor
+ DefaultConstructor
@@ -398,7 +398,7 @@ This allows to add a custom attribute to a class, interface, delegate, struct or
- DefaultConstructor
+ DefaultConstructor
@@ -414,7 +414,7 @@ This allows to add a custom attribute to a class, interface, delegate, struct or
- DefaultConstructor
+ ArgumentValue
@@ -431,21 +431,21 @@ This allows to add a custom attribute to a class, interface, delegate, struct or
- DefaultConstructor
+ DefaultConstructor
- PublicConstructors
+ PublicConstructors
- DefaultConstructor
+ DefaultConstructor
@@ -463,17 +463,17 @@ This allows to add a custom attribute to a class, interface, delegate, struct or
- DefaultConstructor
+ DefaultConstructor
- DefaultConstructor
+ DefaultConstructor
- PublicConstructors
+ PublicConstructors
@@ -491,7 +491,7 @@ This allows to add a custom attribute to a class, interface, delegate, struct or
- DefaultConstructor
+ DefaultConstructor
@@ -507,7 +507,7 @@ This allows to add a custom attribute to a class, interface, delegate, struct or
- DefaultConstructor
+ DefaultConstructor
@@ -528,7 +528,7 @@ attributes are applied.
- PublicConstructors
+ PublicConstructors
diff --git a/docs/workflow/README.md b/docs/workflow/README.md
index 74bbc892f3e..78b4fd3f19b 100644
--- a/docs/workflow/README.md
+++ b/docs/workflow/README.md
@@ -1,104 +1,162 @@
# Workflow Guide
-- [Build Requirements](#build-requirements)
-- [Getting Yourself Started](#getting-yourself-started)
-- [Configurations and Subsets](#configurations-and-subsets)
- - [What does this mean for me?](#what-does-this-mean-for-me)
-- [Full Instructions on Building and Testing the Runtime Repo](#full-instructions-on-building-and-testing-the-runtime-repo)
+- [Introduction](#introduction)
+- [Important Concepts to Understand](#important-concepts-to-understand)
+ - [Build Configurations](#build-configurations)
+- [Building the Repo](#building-the-repo)
+ - [General Overview](#general-overview)
+ - [Get Started on your Platform and Components](#get-started-on-your-platform-and-components)
+ - [General Recommendations](#general-recommendations)
+- [Testing the Repo](#testing-the-repo)
+ - [Performance Analysis](#performance-analysis)
- [Warnings as Errors](#warnings-as-errors)
- [Submitting a PR](#submitting-a-pr)
-- [Triaging errors in CI](#triaging-errors-in-ci)
+- [Triaging Errors in CI](#triaging-errors-in-ci)
-The repo can be built for the following platforms, using the provided setup and the following instructions. Before attempting to clone or build, please check the requirements that match your machine, and ensure you install and prepare all as necessary.
+## Introduction
-## Build Requirements
+The runtime repo can be worked with on Windows, Linux, macOS, and FreeBSD. Each platform has its own specific requirements to work properly, and not all architectures are supported for dev work. That said, the builds can target a wider range of platforms beyond the ones mentioned earlier. You can see it as there are always two platforms at play whenever you are working with builds in the runtime repo:
+
+- **The Build Platform:** This is the platform of the machine where you cloned the runtime repo and therefore where all your build tools are running on. The following table shows the OS and architecture combinations that we currently support, as well as links to each OS's requirements doc. If you are using WSL directly (i.e. not Docker), then follow the Linux requirements doc.
| Chip | Windows | Linux | macOS | FreeBSD |
-| :---- | :------: | :------: | :------: | :------: |
+| :---: | :------: | :------: | :------: | :------: |
| x64 | ✔ | ✔ | ✔ | ✔ |
| x86 | ✔ | | | |
| Arm32 | | ✔ | | |
| Arm64 | ✔ | ✔ | ✔ | |
| | [Requirements](requirements/windows-requirements.md) | [Requirements](requirements/linux-requirements.md) | [Requirements](requirements/macos-requirements.md) | [Requirements](requirements/freebsd-requirements.md)
+- **The Target Platform:** This is the platform you are building the artifacts for, i.e. the platform you intend to run your builds on.
+
+The *Build Platform* and the *Target Platform* can be either the same as or different from each other. The former scenario is straightforward, as you will likely be doing all the work on the same machine. In the latter scenario, the process is called *cross-compiling*. There are certain workflows that require you to follow this process, as it is not possible to build the repo directly on those platforms (e.g., Web Assembly (WASM), Browser, Mobiles). The full instructions on how to work with this are detailed in the building docs later on.
+
Additionally, keep in mind that cloning the full history of this repo takes roughly 400-500 MB of network transfer, inflating to a repository that can consume somewhere between 1 to 1.5 GB. A build of the repo can take somewhere between 10 and 20 GB of space for a single OS and Platform configuration depending on the portions of the product built. This might increase over time, so consider this to be a minimum bar for working with this codebase.
-## Getting Yourself Started
+The runtime repo consists of three major components:
+
+- The Runtimes (CoreCLR and Mono)
+- The Libraries
+- The Hosts and Installers
+
+You can run your builds from a regular terminal, from the root of the repository. Sudo and administrator privileges are not needed for this.
+
+- For instructions on how to edit code and make changes, see [Editing and Debugging](/docs/workflow/editing-and-debugging.md).
+- For instructions on how to debug CoreCLR, see [Debugging CoreCLR](/docs/workflow/debugging/coreclr/debugging-runtime.md).
+- For instructions on using GitHub Codespaces, see [Codespaces](/docs/workflow/Codespaces.md).
+
+## Important Concepts to Understand
+
+The following sections describe some important terminology to keep in mind while working with runtime repo builds. For more information, and a complete list of acronyms and their meanings, check out the glossary [over here](/docs/project/glossary.md).
+
+### Build Configurations
+
+To work with the runtime repo, there are three supported configurations (one is *CoreCLR* exclusive) that define how your build will behave:
+
+- **Debug**: Non-optimized code. Asserts are enabled. This configuration runs the slowest. As its name suggests, it provides the best experience for debugging the product.
+- **Checked** *(CoreCLR runtime exclusive)*: Optimized code. Asserts are enabled.
+- **Release**: Optimized code. Asserts are disabled. Runs at the best speed, and is most suitable for performance profiling. This will impact the debugging experience however, due to compiler optimizations that make understanding what the debugger shows difficult, relative to the source code.
+
+### Build Components
+
+- **Runtime**: The execution engine for managed code. There are two different implementations, both written in C or C++:
+ - *CoreCLR*: The comprehensive execution engine originally born from .NET Framework. Its source code lives under the [src/coreclr](/src/coreclr) subtree.
+ - *Mono*: A slimmer runtime than CoreCLR, originally born open-source to bring .NET and C# support to non-Windows platforms. Due to its lightweight nature, it is less affected in terms of speed when working with the *Debug* configuration. Its source code lives under the [src/mono](/src/mono) subtree.
+
+- **CoreLib** *(also known as System.Private.CoreLib)*: The lowest level managed library. It is directly related to the runtime, which means it must be built in the matching configuration (e.g. Building a *Debug* runtime means *CoreLib* must also be in *Debug*). The `clr` subset includes both, the *Runtime* and the *CoreLib* components, so you usually don't have to worry about that. There are, however, some special cases where you might need to build the components separately. The runtime agnostic code for this library can be found at [src/libraries/System.Private.CoreLib/src](/src/libraries/System.Private.CoreLib/src/README.md).
+
+- **Libraries**: The bulk of dll's providing the rest of the functionality to the runtime. The libraries can be built in their own configuration, regardless of which one the runtime is using. Their source code lives under the [src/libraries](/src/libraries) subtree.
+
+## Building the Repo
+
+The main script that will be in charge of most of the building you might want to do is the `build.sh`, or `build.cmd` on Windows, located at the root of the repo. This script receives as arguments the subset(s) you might want to build, as well as multiple parameters to configure your build, such as the configuration, target operating system, target architecture, and so on.
+
+**NOTE:** If you plan on using Docker to work on the runtime repo, read [this doc](/docs/workflow/using-docker.md) first. It explains how to set up, as well as the images and containers to prepare you to follow the building and testing instructions in the next sections.
+
+### General Overview
+
+Running the script (`build.sh`/`build.cmd`) with no arguments will build the whole repo in *Debug* configuration, for the OS and architecture of your machine. A typical dev workflow usually involves only one or two components at a time, so it is more efficient to just build those. This is done by means of the `-subset` flag. For example, for CoreCLR, it would be:
+
+```bash
+./build.sh -subset clr
+```
+
+The main subset values are:
+
+- `Clr`: The full CoreCLR runtime, which consists of the runtime itself and the CoreLib components.
+- `Libs`: All the libraries components, excluding their tests. This includes the libraries' native parts, refs, source assemblies, and their packages and test infrastructure.
+- `Packs`: The shared framework packs, archives, bundles, installers, and the framework pack tests.
+- `Host`: The .NET hosts, packages, hosting libraries, and their tests.
+- `Mono`: The Mono runtime and its CoreLib.
-The runtime repo can be built from a regular, non-administrator command prompt, from the root of the repo.
+Some subsets are subsequently divided into smaller pieces, giving you more flexibility as to what to build/rebuild depending on what you're working on. For a full list of all the supported subsets, run the build script, passing `help` as the argument to the `subset` flag.
-The repository currently consists of three different major parts:
+It is also possible to build more than one subset under the same command-line. In order to do this, you have to link them together with a `+` sign in the value you're passing to `-subset`. For example, to build both, CoreCLR and Libraries in Release configuration, the command-line would look like this:
-* The Runtimes
-* The Libraries
-* The Installer
+```bash
+./build.sh -subset clr+libs -configuration Release
+```
-More info on this, as well as the different build configurations in the [Configurations and Subsets section](#configurations-and-subsets).
+If you require to use different configurations for different subsets, there are some specific flags you can use:
-This was a concise introduction and now it's time to show the specifics of building specific subsets in any given supported platform, since most likely you will want to customize your builds according to what component(s) you're working on, as well as how you configured your build environment. We have links to instructions depending on your needs [in this section](#full-instructions-on-building-and-testing-the-runtime-repo).
+- `-runtimeConfiguration (-rc)`: The CoreCLR build configuration
+- `-librariesConfiguration (-lc)`: The Libraries build configuration
+- `-hostConfiguration (-hc)`: The Host build configuration
-* For instructions on how to edit code and make changes, see [Editing and Debugging](editing-and-debugging.md).
-* For instructions on how to debug CoreCLR, see [Debugging CoreCLR](/docs/workflow/debugging/coreclr/debugging-runtime.md).
-* For instructions on using GitHub Codespaces, see [Codespaces](/docs/workflow/Codespaces.md).
+The behavior of the script is that the general configuration flag `-c` affects all subsets that have not been qualified with a more specific flag, as well as the subsets that don't have a specific flag supported, like `packs`. For example, the following command-line would build the libraries in *Release* mode and the runtime in *Debug* mode:
-## Configurations and Subsets
+```bash
+./build.sh -subset clr+libs -configuration Release -runtimeConfiguration Debug
+```
-You may need to build the tree in a combination of configurations. This section explains why.
+In this example, the `-lc` flag was not specified, so `-c` qualifies `libs`. In the first example, only `-c` was passed, so it qualifies both `clr` and `libs`.
-
-A quick reminder of some concepts -- see the [glossary](/docs/project/glossary.md) for more on these:
+As an extra note here, if the subsets are the first argument you pass to the build script, you can omit the `-subset` flag altogether. Additionally, several of the supported flags also include a shorthand version (e.g. `-c` for `-configuration`). Run the script with `-h` or `-help` to get an extensive overview on all the supported flags to customize your build, including their shorthand forms, as well as a wider variety of examples.
-* **Debug Configuration** -- Non-optimized code. Asserts are enabled.
-* **Checked Configuration** -- Optimized code. Asserts are enabled. _Only relevant to CoreCLR runtime._
-* **Release Configuration** -- Optimized code. Asserts are disabled. Runs at the best speed, and suitable for performance profiling. This will impact the debugging experience due to compiler optimizations that make understanding what the debugging is showing difficult to reason about, relative to the source code.
+**NOTE:** On non-Windows systems, the longhand versions of the flags can be passed with either single `-` or double `--` dashes.
-When we talk about mixing configurations, we're discussing the following sub-components:
+### Get Started on your Platform and Components
-
-* **Runtime** is the execution engine for managed code and there are two different implementations available. Both are written in C/C++, therefore, easier to debug when built in a Debug configuration.
- * CoreCLR is the comprehensive execution engine which, if built in Debug Configuration, executes managed code very slowly. For example, it will take a long time to run the managed code unit tests. The code lives under [src/coreclr](/src/coreclr).
- * Mono is a portable and also slimmer runtime and it's not that sensitive to Debug Configuration for running managed code. You will still need to build it without optimizations to have good runtime debugging experience though. The code lives under [src/mono](/src/mono).
-* **CoreLib** (also known as System.Private.CoreLib) is the lowest level managed library. It has a special relationship to the runtimes and therefore it must be built in the matching configuration, e.g., if the runtime you are using was built in a Debug configuration, this must be in a Debug configuration. The runtime agnostic code for this library can be found at [src/libraries/System.Private.CoreLib/src](/src/libraries/System.Private.CoreLib/src/README.md).
-* **Libraries** is the bulk of the dlls that are oblivious to the configuration that runtimes and CoreLib were built in. They are most debuggable when built in a Debug configuration, and happily, they still run sufficiently fast in that configuration that it's acceptable for development work. The code lives under [src/libraries](/src/libraries).
+Now that you've got the general idea on how to get started, it is important to mention that, while the procedure is very similar among platforms and subsets, each component has its own technicalities and details, as explained in their own specific docs:
-
-To build just one part of the repo, you add the `-subset` flag with the subset you wish to build to the root build script _(build.cmd/sh)_. You can specify more than one by linking them with the `+` operator (e.g. `-subset clr+libs` would build CoreCLR and the libraries). Note that if the subset is the first argument you pass to the script, you can omit the `--subset` flag altogether.
+**Component Specifics:**
-### What does this mean for me?
+- [CoreCLR](/docs/workflow/building/coreclr/README.md)
+- [Libraries](/docs/workflow/building/libraries/README.md)
+- [Mono](/docs/workflow/building/mono/README.md)
-At this point you probably know what you are planning to work on primarily: the runtimes or libraries. As general suggestions on how to proceed, here are some ideas:
+**NOTE:** *NativeAOT* is part of CoreCLR, but it has its own specifics when it comes to building. We have a separate doc dedicated to it [over here](/docs/workflow/building/coreclr/nativeaot.md).
-* If you're working in runtimes, you may want to build everything in the Debug configuration, depending on how comfortable you are debugging optimized native code.
-* If you're working in libraries, you will want to use debug libraries with a release version of runtime and CoreLib, because the tests will run faster.
-* If you're working in CoreLib - you probably want to try to get the job done with release runtime and CoreLib, and fall back to debug if you need to. The [Building Libraries](/docs/workflow/building/libraries/README.md) document explains how you'll do this.
+### General Recommendations
-## Full Instructions on Building and Testing the Runtime Repo
+- If you're working with the runtimes, then the usual recommendation is to build everything in *Debug* mode. That said, if you know you won't be debugging the libraries source code but will need them (e.g. for a *Core_Root* build), then building the libraries on *Release* instead will provide a more productive experience.
+- The counterpart to the previous point applies when you are working in the libraries. In this case, it is recommended to build the runtime on *Release* and the libraries on *Debug*.
+- If you're working on *CoreLib*, then you probably want to try to get the job done with a *Release* runtime, and fall back to *Debug* if you need to.
-Now you know about configurations and how we use them, so now you will want to read how to build what you plan to work on. Each of these will have further specific instructions or links for whichever platform you are developing on.
+## Testing the Repo
-* [Building CoreCLR runtime](/docs/workflow/building/coreclr/README.md)
-* [Building Mono runtime](/docs/workflow/building/mono/README.md)
-* [Building Libraries](/docs/workflow/building/libraries/README.md)
+Building the components of the repo is just part of the experience. The runtime repo also includes vast test suites you can run to ensure your changes work properly as expected and don't inadvertently break something else. Each component has its own methodologies to run their tests, which are explained in their own specific docs:
-After that, here's information about how to run tests:
+- [CoreCLR](/docs/workflow/testing/coreclr/testing.md)
+ - [NativeAOT](/docs/workflow/building/coreclr/nativeaot.md#running-tests)
+- [Libraries](/docs/workflow/testing/libraries/testing.md)
+- [Mono](/docs/workflow/testing/mono/testing.md)
-* [Testing CoreCLR runtime](/docs/workflow/testing/coreclr/testing.md)
-* [Testing Mono runtime](/docs/workflow/testing/mono/testing.md)
-* [Testing Libraries](/docs/workflow/testing/libraries/testing.md)
+### Performance Analysis
-And how to measure performance:
+Fixing bugs and adding new features aren't the only things to work on in the runtime repo. We also have to ensure performance is kept as optimal as can be, and that is done through benchmarking and profiling. If you're interested in conducting these kinds of analysis, the following links will show you the usual workflow you can follow:
-* [Benchmarking workflow for dotnet/runtime repository](https://github.com/dotnet/performance/blob/master/docs/benchmarking-workflow-dotnet-runtime.md)
-* [Profiling workflow for dotnet/runtime repository](https://github.com/dotnet/performance/blob/master/docs/profiling-workflow-dotnet-runtime.md)
+* [Benchmarking Workflow for dotnet/runtime repository](https://github.com/dotnet/performance/blob/master/docs/benchmarking-workflow-dotnet-runtime.md)
+* [Profiling Workflow for dotnet/runtime repository](https://github.com/dotnet/performance/blob/master/docs/profiling-workflow-dotnet-runtime.md)
## Warnings as Errors
-The repo build treats warnings as errors. Dealing with warnings when you're in the middle of making changes can be annoying (e.g. unused variable that you plan to use later). To disable treating warnings as errors, set the `TreatWarningsAsErrors` environment variable to `false` before building. This variable will be respected by both the `build.sh`/`build.cmd` root build scripts and builds done with `dotnet build` or Visual Studio. Some people may prefer setting this environment variable globally in their machine settings.
+The repo build treats warnings as errors, including many code-style warnings. Dealing with warnings when you're in the middle of making changes can be annoying (e.g. unused variable that you plan to use later). To disable treating warnings as errors, set the `TreatWarningsAsErrors` environment variable to `false` before building. This variable will be respected by both the `build.sh`/`build.cmd` root build scripts and builds done with `dotnet build` or Visual Studio. Some people may prefer setting this environment variable globally in their machine settings.
## Submitting a PR
-Before submitting a PR, make sure to review the [contribution guidelines](../../CONTRIBUTING.md). After you get familiarized with them, please read the [PR guide](ci/pr-guide.md) to find more information about tips and conventions around creating a PR, getting it reviewed, and understanding the CI results.
+Before submitting a PR, make sure to review the [contribution guidelines](/CONTRIBUTING.md). After you get familiarized with them, please read the [PR guide](/docs/workflow/ci/pr-guide.md) to find more information about tips and conventions around creating a PR, getting it reviewed, and understanding the CI results.
-## Triaging errors in CI
+## Triaging Errors in CI
-Given the size of the runtime repository, flaky tests are expected to some degree. There are a few mechanisms we use to help with the discoverability of widely impacting issues. We also have a regular procedure that ensures issues get properly tracked and prioritized. You can find more information on [triaging failures in CI](ci/failure-analysis.md).
+Given the size of the runtime repository, flaky tests are expected to some degree. There are a few mechanisms we use to help with the discoverability of widely impacting issues. We also have a regular procedure that ensures issues get properly tracked and prioritized. You can find more information on [triaging failures in CI](/docs/workflow/ci/failure-analysis.md).
diff --git a/docs/workflow/building/coreclr/README.md b/docs/workflow/building/coreclr/README.md
index fbc1eab77c7..fc1b7aec225 100644
--- a/docs/workflow/building/coreclr/README.md
+++ b/docs/workflow/building/coreclr/README.md
@@ -1,100 +1,156 @@
-# Building CoreCLR
+# Building CoreCLR Guide
-* [Introduction](#introduction)
-* [Common Building Options](#common-building-options)
- * [Build Drivers](#build-drivers)
- * [Extra Flags](#extra-flags)
- * [Build Results Layout](#build-results-layout)
-* [Platform-Specific Instructions](#platform-specific-instructions)
-* [Testing CoreCLR](#testing-coreclr)
+- [The Basics](#the-basics)
+ - [Build Results](#build-results)
+ - [What to do with the Build](#what-to-do-with-the-build)
+  - [The Core_Root for Testing Your Build](#the-core_root-for-testing-your-build)
+ - [The Dev Shipping Packs](#the-dev-shipping-packs)
+ - [Cross Compilation](#cross-compilation)
+- [Other Features](#other-features)
+ - [Build Drivers](#build-drivers)
+ - [Extra Flags](#extra-flags)
+ - [Native ARM64 Building on Windows](#native-arm64-building-on-windows)
+ - [Debugging Information for macOS](#debugging-information-for-macos)
+ - [Native Sanitizers](#native-sanitizers)
-## Introduction
+Firstly, make sure you've prepared your environment and installed all the requirements for your platform. If not, follow this [link](/docs/workflow/README.md#introduction) for the corresponding instructions.
-Here is a brief overview on how to build the common form of CoreCLR in general. For further specific instructions on each platform, we have links to instructions later on in [Platform-Specific Instructions](#platform-specific-instructions).
+## The Basics
-To build just CoreCLR, use the `subset` flag to the `build.sh` or `build.cmd` script at the repo root. Note that specifying `-subset` explicitly is not necessary if it is the first argument (i.e. `./build.sh --subset clr` and `./build.sh clr` are equivalent). However, if you specify any other argument beforehand, then you must specify the `-subset` flag.
-
-For Linux and macOS:
+As explained in the main workflow [*README*](/docs/workflow/README.md), you can build the CoreCLR runtime by passing `-subset clr` as argument to the repo's main `build.sh`/`build.cmd` script:
```bash
-./build.sh --subset clr
+./build.sh -subset clr
```
-For Windows:
+By default, the script builds the _clr_ in *Debug* configuration, which doesn't have any optimizations and has all assertions enabled. If you're aiming to run performance benchmarks, make sure you select the *Release* version with `-configuration Release`, as that one generates the most optimized code. On the other hand, if your goal is to run tests, then you can benefit the most from CoreCLR's exclusive *Checked* configuration. This one retains the assertions but has the native compiler optimizations enabled, thus making it run faster than *Debug*. This is the usual mode used for running tests in the CI pipelines.
-```cmd
-.\build.cmd -subset clr
-```
+### Build Results
-## Common Building Options
+Once the `clr` build completes, the main generated artifacts are placed in `artifacts/bin/coreclr/..`. For example, for a Linux x64 Release build, the output path would be `artifacts/bin/coreclr/linux.x64.Release`. Here, you will find a number of different binaries, of which the most important are the following:
-By default, the script generates a _Debug_ build type, which is not optimized code and includes asserts. As its name suggests, this makes it easier and friendlier to debug the code. If you want to make performance measurements, you ought to build the _Release_ version instead, which doesn't have any asserts and has all code optimizations enabled. Likewise, if you plan on running tests, the _Release_ configuration is more suitable since it's considerably faster than the _Debug_ one. For this, you add the flag `-configuration release` (or `-c release`). For example:
+- `corerun`: The command-line host executable. This program loads and starts the CoreCLR runtime and receives the managed program you want to run as argument (e.g. `./corerun program.dll`). On Windows, it is called `corerun.exe`.
+- `coreclr`: The CoreCLR runtime itself. On Windows, it's called `coreclr.dll`, on macOS it is `libcoreclr.dylib`, and on Linux it is `libcoreclr.so`.
+- `System.Private.CoreLib.dll`: The core managed library, containing the definitions of `Object` and the base functionality.
-```bash
-./build.sh --subset clr --configuration release
-```
+All the generated logs are placed under `artifacts/log`, and all the intermediate output the build uses is placed in the `artifacts/obj/coreclr` directory.
-As mentioned before in the [general building document](/docs/workflow/README.md#configurations-and-subsets), CoreCLR also supports a _Checked_ build type which has asserts enabled like _Debug_, but is built with the native compiler optimizer enabled, so it runs much faster. This is the usual mode used for running tests in the CI system.
+### What to do with the Build
-Now, it is also possible to select a different configuration for each subset when building them together. The `--configuration` flag applies universally to all subsets, but it can be overridden with any one or more of the following ones:
+*CoreCLR* is one of the most important components of the runtime repo, as it is one of the main engines of the .NET product. That said, while you can test and use it on its own, it is easiest to do this when used in conjunction with the *Libraries* subset. When you build both subsets, you can get access to the *Core_Root*. This includes all the libraries and the Clr, alongside other tools like *Crossgen2*, *R2RDump*, and the *ILC* compiler, and the main command-line host executable `corerun`, all bundled together. The *Core_Root* is one of the most reliable ways of testing changes to the runtime, running external apps with your build, and it is the way Clr tests are run in the CI pipelines.
-* `--runtimeConfiguration (-rc)`: Flag for the CLR build configuration.
-* `--librariesConfiguration (-lc)`: Flag for the libraries build configuration.
-* `--hostConfiguration (-hc)`: Flag for the host build configuration.
+#### The Core_Root for Testing Your Build
-For example, a very common scenario used by developers and the repo's test scripts with default options, is to build the _clr_ in _Debug_ mode, and the _libraries_ in _Release_ mode. To achieve this, the command-line would look like the following:
+As described in the [workflow README](/docs/workflow/README.md#building-the-repo), you can build multiple subsets by concatenating them with a `+` sign in the `-subset` argument. To prepare to build the *Core_Root*, we need to build the libraries and CoreCLR. Thus, the `-subset` argument would be `clr+libs`. Usually, the recommended workflow is to build the clr in *Debug* configuration and the libraries in *Release*:
```bash
-./build.sh --subset clr+libs --configuration Release --runtimeConfiguration Debug
+./build.sh -subset clr+libs -runtimeConfiguration Debug -librariesConfiguration Release
```
-Or alternatively:
+Once you have both subsets built, you can generate the *Core_Root*, which as mentioned above, is the most flexible way of testing your changes. You can generate the *Core_Root* by running the following command, assuming a *Checked* clr build on an x64 machine:
```bash
-./build.sh --subset clr+libs --librariesConfiguration Release --runtimeConfiguration Debug
+./src/tests/build.sh -x64 -checked -generatelayoutonly
```
-For more information about all the different options available, supply the argument `-help|-h` when invoking the build script. On Unix-like systems, non-abbreviated arguments can be passed in with a single `-` or double hyphen `--`.
+Since this is more related to testing, you can find the full details and instructions in the CoreCLR testing doc [over here](/docs/workflow/testing/coreclr/testing.md).
-### Build Drivers
+#### The Dev Shipping Packs
-If you want to use _Ninja_ to drive the native build instead of _Make_ on non-Windows platforms, you can pass the `-ninja` flag to the build script as follows:
+It is also possible to generate the full runtime NuGet packages and installer that you can use to test in a more production-esque scenario. To generate these shipping artifacts, you have to build the `clr`, `libs`, `host`, and `packs` subsets:
```bash
-./build.sh --subset clr --ninja
+./build.sh -subset clr+libs+host+packs -configuration Release
```
-If you want to use Visual Studio's _MSBuild_ to drive the native build on Windows, you can pass the `-msbuild` flag to the build script similarly to the `-ninja` flag.
+The shipping artifacts are placed in the `artifacts/packages/<configuration>/Shipping` directory. Here, you will find several NuGet packages, as well as their respective symbols packages, generated from your build. More importantly, you will find a zipped archive with the full contents of the runtime, organized in the same layout as they are in the official dotnet installations. This archive includes the following files:
+
+- `host/fxr/<version>-dev/hostfxr` (`hostfxr` is named differently depending on the platform: `hostfxr.dll` on Windows, `libhostfxr.dylib` on macOS, and `libhostfxr.so` on Linux)
+- `shared/Microsoft.NETCore.App/<version>-dev/*` (The `*` here refers to all the libraries dll's, as well as all the binaries necessary for the runtime to function)
+- `dotnet (dotnet.exe on Windows)` (The main `dotnet` executable you usually use to run your apps)
+
+Note that this package only includes the runtime, therefore you will only be able to run apps but not build them. For that, you would need the full SDK.
+
+**NOTE:** On Windows, this will also include `.exe` and `.msi` installers, which you can use in case you want to test your build machine-wide. This is the closest you can get to an official build installation.
+
+For a full guide on using the shipping packages for testing, check out the doc we have dedicated to it [over here](/docs/workflow/testing/using-dev-shipping-packages.md).
-We recommend using _Ninja_ for building the project on Windows since it more efficiently uses the build machine's resources for the native runtime build in comparison to Visual Studio's _MSBuild_.
+### Cross Compilation
+
+Using an x64 machine, it is possible to generate builds for other architectures. Not all architectures are supported for cross-compilation however, and it's also dependent on the OS you are using to build and target. Refer to the table below for the compatibility matrix.
+
+| Operating System | To x86 | To Arm32 | To Arm64 |
+| :--------------: | :------: | :------: | :------: |
+| Windows | ✔ | | ✔ |
+| macOS | | | ✔ |
+| Linux | | ✔ | ✔ |
+
+**NOTE:** On macOS, it is also possible to cross-compile from ARM64 to x64 using an Apple Silicon Mac.
+
+Detailed instructions on how to do cross-compilation can be found in the cross-building doc [over here](/docs/workflow/building/coreclr/cross-building.md).
+
+## Other Features
+
+### Build Drivers
+
+By default, the CoreCLR build uses *Ninja* as the native build driver on Windows, and *Make* on non-Windows platforms. You can override this behavior by passing the appropriate flags to the build script:
+
+To use Visual Studio's *MSBuild* instead of *Ninja* on Windows:
+
+```cmd
+./build.cmd -subset clr -msbuild
+```
+
+It is recommended to use *Ninja* on Windows, as it uses the build machine's resources more efficiently in comparison to Visual Studio's *MSBuild*.
+
+To use *Ninja* instead of *Make* on non-Windows:
+
+```bash
+./build.sh -subset clr -ninja
+```
### Extra Flags
-To pass extra compiler/linker flags to the coreclr build, set the environment variables `EXTRA_CFLAGS`, `EXTRA_CXXFLAGS` and `EXTRA_LDFLAGS` as needed. Don't set `CFLAGS`/`CXXFLAGS`/`LDFLAGS` directly as that might lead to configure-time tests failing.
+You can also pass some extra compiler/linker flags to the CoreCLR build. Set the `EXTRA_CFLAGS`, `EXTRA_CXXFLAGS`, and `EXTRA_LDFLAGS` as you see fit for this purpose. The build script will consume them and then set the environment variables that will ultimately affect your build (i.e. those same ones without the `EXTRA_` prefix). Don't set the final ones directly yourself, as that is known to lead to potential failures in configure-time tests.
-### Build Results Layout
+### Native ARM64 Building on Windows
-Once the build has concluded, it will have produced its output artifacts in the following structure:
+Currently, the runtime repo supports building CoreCLR directly on Windows ARM64 without the need to cross-compile, although it is still in an experimental phase. To do this, you need to install the ARM64 build tools and Windows SDK for Visual Studio, in addition to all the requirements outlined in the [Windows Requirements doc](/docs/workflow/requirements/windows-requirements.md).
-* Product binaries will be dropped in `artifacts\bin\coreclr\..` folder.
-* A NuGet package, _Microsoft.Dotnet.CoreCLR_, will be created under `artifacts\bin\coreclr\..\.nuget` folder.
-* Test binaries (if built) will be dropped under `artifacts\tests\coreclr\..` folder. However, remember the root build script will not build the tests. The instructions for working with tests (building and running) are [in the testing doc](/docs/workflow/testing/coreclr/testing.md).
-* The build places logs in `artifacts\log` and these are useful when the build fails.
-* The build places all of its intermediate output in the `artifacts\obj\coreclr` directory.
+Once those requirements are fulfilled, you have to tell the build script to compile for Arm64 using *MSBuild*. *Ninja* is not yet supported on Arm64 platforms:
-If you want to force a full rebuild of the subsets you specified when calling the build script, pass the `-rebuild` flag to it, in addition to any other arguments you might require.
+```cmd
+./build.cmd -subset clr -arch arm64 -msbuild
+```
-## Platform-Specific Instructions
+While this is functional at the time of writing this doc, it is still recommended to cross-compile from an x64 machine, as that's the most stable and tested method.
-Now that you've got the general idea on how the _CoreCLR_ builds work, here are some further documentation links on platform-specific caveats and features.
+### Debugging Information for macOS
-* [Build CoreCLR on Windows](windows-instructions.md)
-* [Build CoreCLR on macOS](macos-instructions.md)
-* [Build CoreCLR on Linux](linux-instructions.md)
-* [Build CoreCLR on FreeBSD](freebsd-instructions.md)
+When building on macOS, the build process puts native component symbol and debugging information into `.dwarf` files, one for each built binary. This is not the native format used by macOS, and debuggers like LLDB can't automatically find them. The format macOS uses is `.dSYM` bundles. To generate them and get a better inner-loop developer experience (e.g. have the LLDB debugger automatically find program symbols and display source code lines, etc.), make sure to enable the `CLR_CMAKE_APPLE_DSYM` flag when calling the build script:
-We also have specific instructions for building _NativeAOT_ [here](/docs/workflow/building/coreclr/nativeaot.md).
+```bash
+./build.sh -subset clr -cmakeargs "-DCLR_CMAKE_APPLE_DSYM=TRUE"
+```
+
+**NOTE:** Converting the entire build process to build and package `.dSYM` bundles on macOS by default is on the table and tracked by issue #92911 [over here](https://github.com/dotnet/runtime/issues/92911).
+
+### Native Sanitizers
+
+CoreCLR is also in the process of supporting the use of native sanitizers during the build to help catch memory safety issues. To apply them, add the `-fsanitize` flag followed by the name of the sanitizer as argument. As of now, these are the supported sanitizers, with plans to add more in the future:
+
+- Sanitizer Name: `AddressSanitizer`
-## Testing CoreCLR
+ Argument to `-fsanitize`: `address`
-For testing your build, the [testing docs](/docs/workflow/testing/coreclr/testing.md) have detailed instructions on how to do it.
+| Platform | Support Status |
+| :------: | :---------------------: |
+| Windows | Regularly Tested on x64 |
+| macOS | Regularly Tested on x64 |
+| Linux | Regularly Tested on x64 |
+
+And to use it, the command would look as follows:
+
+```bash
+./build.sh -subset clr -fsanitize address
+```
diff --git a/docs/workflow/building/coreclr/linux-instructions.md b/docs/workflow/building/coreclr/linux-instructions.md
deleted file mode 100644
index f19bcfbd1e7..00000000000
--- a/docs/workflow/building/coreclr/linux-instructions.md
+++ /dev/null
@@ -1,118 +0,0 @@
-# Build CoreCLR on Linux
-
-* [Build using Docker](#build-using-docker)
- * [Docker Images](#docker-images)
-* [Build using your own Environment](#build-using-your-own-environment)
- * [Set the maximum number of file-handles](#set-the-maximum-number-of-file-handles)
-* [Build the Runtime](#build-the-runtime)
- * [Cross-Compilation](#cross-compilation)
-* [Create the Core_Root](#create-the-core_root)
-
-This guide will walk you through building CoreCLR on Linux.
-
-As mentioned in the [Linux requirements doc](/docs/workflow/requirements/linux-requirements.md), there are two options to build CoreCLR on Linux:
-
-* Build using Docker.
-* Build using your own environment.
-
-## Build using Docker
-
-Building using Docker will require that you choose the correct image for your environment.
-
-Note that the OS is strictly speaking not important. For example if you are on Ubuntu 20.04 and build using the Ubuntu 18.04 x64 image there should be no issues. You can even use Linux images on a Windows OS if you have [WSL](https://learn.microsoft.com/windows/wsl/about) enabled. However, note that you can't run multiple OS's on the same _Docker Daemon_, as it takes resources from the underlying kernel as needed. In other words, you can run either Linux on WSL, or Windows containers. You have to switch between them if you need both, and restart Docker.
-
-The target architecture is more important, as building arm32 using the x64 image will not work. There will be missing _rootfs_ components required by the build. See [Docker Images](#docker-images) below, for more information on choosing an image to build with.
-
-**NOTE**: The image's architecture has to match your machine's supported platforms. For example, you can't run arm32 images on an x64 machine. But you could run x64 and arm64 images on an M1 Mac, for example. This is thanks to the _Rosetta_ emulator that Apple Silicon provides. Same case applies to running x86 on an x64 Windows machine thanks to Windows' _SYSWOW64_. Likewise, you can run Linux arm32 images on a Linux arm64 host.
-
-Please note that choosing the same image as the host OS you are running on will allow you to run the product/tests outside of the docker container you built in.
-
-Once you have chosen an image, the build is one command run from the root of the runtime repository:
-
-```bash
-docker run --rm \
- -v :/runtime \
- -w /runtime \
- mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-22.04 \
- ./build.sh --subset clr
-```
-
-Dissecting the command:
-
-* `--rm`: Erase the created container after use.
-* `-v :/runtime`: Mount the runtime repository under `/runtime`. Replace `` with the full path to your `runtime` repo clone, e.g., `-v /home/user/runtime:/runtime`.
-* `-w: /runtime`: Set /runtime as working directory for the container.
-* `mcr.microsoft.com/dotnet-buildtools/prereqs:centos-7-20210714125435-9b5bbc2`: Docker image name.
-* `./build.sh`: Command to be run in the container: run the root build command.
-* `-subset clr`: Build the clr subset (excluding libraries and installers).
-
-To do cross-building using Docker, you need to use either specific images designated for this purpose, or configure your own. Detailed information on this can be found in the [cross-building doc](/docs/workflow/building/coreclr/cross-building.md#cross-building-using-docker). Note that the official build images are all cross-build images, even when targeting the same architecture as the host image. This is because they target versions of glibc or musl libc that are included in the cross-build rootfs, and not the host OS.
-
-### Docker Images
-
-This table of images might often become stale as we change our images as our requirements change. The images used for our official builds can be found in [the pipeline resources](/eng/pipelines/common/templates/pipeline-with-resources.yml) of our Azure DevOps builds under the `container` key of the platform you plan to build. These image tags don't include version numbers, and our build infrastructure will automatically use the latest version of the image. You can ensure you are using the latest version by using `docker pull`, for example:
-
-```
-docker pull mcr.microsoft.com/dotnet-buildtools/prereqs:cbl-mariner-2.0-cross-arm64
-```
-
-All official builds are cross-builds with a rootfs for the target OS, and will use the clang version available on the container.
-
-| Host OS | Target OS | Target Arch | Image location | crossrootfs location |
-| --------------------- | ------------ | --------------- | -------------------------------------------------------------------------------------- | -------------------- |
-| Azure Linux (x64) | Alpine 3.13 | x64 | `mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-cross-amd64-alpine-net9.0` | `/crossrootfs/x64` |
-| Azure Linux (x64) | Ubuntu 16.04 | x64 | `mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-cross-amd64-net9.0` | `/crossrootfs/x64` |
-| Azure Linux (x64) | Alpine | arm32 (armhf) | `mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-cross-arm-alpine-net9.0` | `/crossrootfs/arm` |
-| Azure Linux (x64) | Ubuntu 16.04 | arm32 (armhf) | `mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-cross-arm-net9.0` | `/crossrootfs/arm` |
-| Azure Linux (x64) | Alpine | arm64 (arm64v8) | `mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-cross-arm64-alpine-net9.0` | `/crossrootfs/arm64` |
-| Azure Linux (x64) | Ubuntu 16.04 | arm64 (arm64v8) | `mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-cross-arm64-net9.0` | `/crossrootfs/arm64` |
-| Azure Linux (x64) | Ubuntu 16.04 | x86 | `mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-cross-x86-net9.0` | `/crossrootfs/x86` |
-| CBL-mariner 2.0 (x64) | FreeBSD 13 | x64 | `mcr.microsoft.com/dotnet-buildtools/prereqs:cbl-mariner-2.0-cross-amd64-freebsd-13` | `/crossrootfs/x64` |
-
-These Docker images are built using the Dockerfiles maintained in the [dotnet-buildtools-prereqs-docker repo](https://github.com/dotnet/dotnet-buildtools-prereqs-docker).
-
-## Build using your own Environment
-
-Ensure you have all of the prerequisites installed from the [Linux Requirements](/docs/workflow/requirements/linux-requirements.md).
-
-### Set the maximum number of file-handles
-
-To ensure that your system can allocate enough file-handles for the libraries build, run the command in your terminal `sysctl fs.file-max`. If it is less than 100000, add `fs.file-max = 100000` to `/etc/sysctl.conf`, and then run `sudo sysctl -p`.
-
-## Build the Runtime
-
-To build CoreCLR on Linux, run `build.sh` while specifying the `clr` subset:
-
-```bash
-./build.sh --subset clr
-```
-
-After the build is completed, there should be some files placed in `artifacts/bin/coreclr/linux..` (for example `artifacts/bin/coreclr/linux.x64.Release`). The most important binaries are the following:
-
-* `corerun`: The command line host. This program loads and starts the CoreCLR runtime and passes the managed program (e.g. `program.dll`) you want to run with it.
-* `libcoreclr.so`: The CoreCLR runtime itself.
-* `System.Private.CoreLib.dll`: The core managed library, containing definitions of `Object` and base functionality.
-
-### Cross-Compilation
-
-Just like you can use specialized Docker images, you can also do any of the supported cross-builds for ARM32 or ARM64 on your own Linux environment. Detailed instructions are found in the [cross-building doc](/docs/workflow/building/coreclr/cross-building.md#linux-cross-building).
-
-## Create the Core_Root
-
-The Core_Root provides one of the main ways to test your build. Full instructions on how to build it in the [CoreCLR testing doc](/docs/workflow/testing/coreclr/testing.md), and we also have a detailed guide on how to use it for your own testing in [its own dedicated doc](/docs/workflow/testing/using-corerun-and-coreroot.md).
-
-## Native Sanitizers
-
-CoreCLR can be built with native sanitizers like AddressSanitizer to help catch memory safety issues. To build the project with native sanitizers, add the `-fsanitize address` argument to the build script like the following:
-
-```bash
-build.sh -s clr -fsanitize address
-```
-
-When building the repo with any native sanitizers, you should build all native components in the repo with the same set of sanitizers.
-
-The following sanitizers are supported for CoreCLR on Linux:
-
-| Sanitizer Name | `-fsanitize` argument | Support Status |
-|-----------------|-----------------------|----------------|
-| AddressSanitize | `address` | regularly tested on x64 |
diff --git a/docs/workflow/building/coreclr/macos-instructions.md b/docs/workflow/building/coreclr/macos-instructions.md
deleted file mode 100644
index 7ac0d0c6e0f..00000000000
--- a/docs/workflow/building/coreclr/macos-instructions.md
+++ /dev/null
@@ -1,60 +0,0 @@
-# Build CoreCLR on macOS
-
-* [Environment](#environment)
-* [Build the Runtime](#build-the-runtime)
- * [Cross-Compilation](#cross-compilation)
-* [Create the Core_Root](#create-the-core_root)
-
-This guide will walk you through building CoreCLR on macOS.
-
-## Environment
-
-Ensure you have all of the prerequisites installed from the [macOS Requirements](/docs/workflow/requirements/macos-requirements.md).
-
-## Build the Runtime
-
-To build CoreCLR on macOS, run `build.sh` while specifying the `clr` subset:
-
-```bash
-./build.sh --subset clr
-```
-
-After the build has completed, there should be some files placed in `artifacts/bin/coreclr/osx..` (for example `artifacts/bin/coreclr/osx.x64.Release`). The most important binaries are the following:
-
-* `corerun`: The command line host. This program loads and starts the CoreCLR runtime and passes the managed program (e.g. `program.dll`) you want to run with it.
-* `libcoreclr.dylib`: The CoreCLR runtime itself.
-* `System.Private.CoreLib.dll`: The core managed library, containing definitions of `Object` and base functionality.
-
-### Cross-Compilation
-
-It is possible to get a macOS ARM64 build using an Intel x64 Mac and vice versa, an x64 one using an M1 Mac. Instructions on how to do this are in the [cross-building doc](/docs/workflow/building/coreclr/cross-building.md#macos-cross-building).
-
-## Create the Core_Root
-
-The Core_Root provides one of the main ways to test your build. Full instructions on how to build it in the [CoreCLR testing doc](/docs/workflow/testing/coreclr/testing.md), and we also have a detailed guide on how to use it for your own testing in [its own dedicated doc](/docs/workflow/testing/using-corerun-and-coreroot.md).
-
-## Debugging information
-
-The build process puts native component symbol and debugging information into `.dwarf` files, one for each built binary. This is not the native format used by macOS, and debuggers like LLDB can't automatically find them. The native format used by macOS is `.dSYM` bundles. To build `.dSYM` bundles and get a better inner-loop developer experience on macOS (e.g., have the LLDB debugger automatically find program symbols and display source code lines, etc.), build as follows:
-
-```bash
-./build.sh --subset clr --cmakeargs "-DCLR_CMAKE_APPLE_DSYM=TRUE"
-```
-
-(Note: converting the entire build process to build and package `.dSYM` bundles on macOS by default is tracked by [this](https://github.com/dotnet/runtime/issues/92911) issue.)
-
-## Native Sanitizers
-
-CoreCLR can be built with native sanitizers like AddressSanitizer to help catch memory safety issues. To build the project with native sanitizers, add the `-fsanitize address` argument to the build script like the following:
-
-```bash
-build.sh -s clr -fsanitize address
-```
-
-When building the repo with any native sanitizers, you should build all native components in the repo with the same set of sanitizers.
-
-The following sanitizers are supported for CoreCLR on macOS:
-
-| Sanitizer Name | `-fsanitize` argument | Support Status |
-|-----------------|-----------------------|----------------|
-| AddressSanitize | `address` | regularly tested on x64 |
diff --git a/docs/workflow/building/coreclr/nativeaot.md b/docs/workflow/building/coreclr/nativeaot.md
index ebc636fccf6..586a29ccf7e 100644
--- a/docs/workflow/building/coreclr/nativeaot.md
+++ b/docs/workflow/building/coreclr/nativeaot.md
@@ -145,6 +145,8 @@ Note that to run WASM tests targeting Browser, NodeJS that supports the exceptio
To run all the tests that got built, run `src\tests\run.cmd runnativeaottests [Debug|Release] [wasm]` on Windows, or `src/tests/run.sh --runnativeaottests [Debug|Release] [wasm]` on Linux. The `Debug`/`Release` flag should match the flag that was passed to `build.cmd` in the previous step.
+To build an individual test, follow the instructions for compiling an individual test project located in [Building an Individual Test](/docs/workflow/testing/coreclr/testing.md#building-an-individual-test), but add `/t:BuildNativeAot /p:TestBuildMode=nativeaot` to the build command.
+
To run an individual test (after it was built), navigate to the `artifacts\tests\coreclr\[windows|linux|osx[.x64.[Debug|Release]\$path_to_test` directory. `$path_to_test` matches the subtree of `src\tests`. You should see a `[.cmd|.sh]` file there. This file is a script that will compile and launch the individual test for you. Before invoking the script, set the following environment variables:
* CORE_ROOT=$repo_root\artifacts\tests\coreclr\[windows|linux|osx].x64.[Debug|Release]\Tests\Core_Root
diff --git a/docs/workflow/building/coreclr/windows-instructions.md b/docs/workflow/building/coreclr/windows-instructions.md
deleted file mode 100644
index 3ab6b33bc04..00000000000
--- a/docs/workflow/building/coreclr/windows-instructions.md
+++ /dev/null
@@ -1,67 +0,0 @@
-# Build CoreCLR on Windows
-
-* [Environment](#environment)
-* [Build the Runtime](#build-the-runtime)
- * [Cross-Compilation](#cross-compilation)
-* [Core_Root](#core_root)
-* [Native ARM64 (Experimental)](#native-arm64-experimental)
-
-This guide will walk you through building CoreCLR on Windows.
-
-## Environment
-
-Ensure you have all of the prerequisites installed from the [Windows Requirements](/docs/workflow/requirements/windows-requirements.md).
-
-## Build the Runtime
-
-To build CoreCLR on Windows, run `build.cmd` while specifying the `clr` subset:
-
-```cmd
-.\build.cmd -subset clr
-```
-
-After the build has completed, there should be some files placed in `artifacts/bin/coreclr/windows..` (for example `artifacts/bin/coreclr/windows.x64.Release`). The most important binaries are the following:
-
-* `corerun.exe`: The command line host. This program loads and starts the CoreCLR runtime and passes the managed program (e.g. `program.dll`) you want to run with it.
-* `coreclr.dll`: The CoreCLR runtime itself.
-* `System.Private.CoreLib.dll`: The core managed library, containing definitions of `Object` and base functionality.
-
-### Cross-Compilation
-
-It is possible to get Windows x86, ARM32, and ARM64 builds using an x64 machine. Instructions on how to do this are in the [cross-building doc](/docs/workflow/building/coreclr/cross-building.md#windows-cross-building).
-
-## Core_Root
-
-The Core_Root provides one of the main ways to test your build. Full instructions on how to build it in the [CoreCLR testing doc](/docs/workflow/testing/coreclr/testing.md), and we also have a detailed guide on how to use it for your own testing in [its own dedicated doc](/docs/workflow/testing/using-corerun-and-coreroot.md).
-
-## Native ARM64 (Experimental)
-
-Building natively on ARM64 requires you to have installed the appropriate ARM64 build tools and Windows SDK, as specified in the [Windows requirements doc](/docs/workflow/requirements/windows-requirements.md).
-
-Once those requirements are satisfied, you have to specify you are doing an Arm64 build, and explicitly tell the build script you want to use `MSBuild`. `Ninja` is not yet supported on Arm64 platforms.
-
-```cmd
-build.cmd -s clr -c Release -arch arm64 -msbuild
-```
-
-Since this is still in an experimental phase, the recommended way for building ARM64 is cross-compiling from an x64 machine. Instructions on how to do this can be found at the [cross-building doc](/docs/workflow/building/coreclr/cross-building.md#cross-compiling-for-arm32-and-arm64).
-
-## Native Sanitizers
-
-CoreCLR can be built with native sanitizers like AddressSanitizer to help catch memory safety issues. To build the project with native sanitizers, add the `-fsanitize address` argument to the build script like the following:
-
-```cmd
-build.cmd -s clr -fsanitize address
-```
-
-When building the repo with any native sanitizers, you should build all native components in the repo with the same set of sanitizers.
-
-The following sanitizers are supported for CoreCLR on Windows:
-
-| Sanitizer Name | Minimum VS Version | `-fsanitize` argument | Support Status |
-|----------------|--------------------|-----------------------|----------------|
-| AddressSanitizer | not yet released | `address` | experimental |
-
-## Using a custom compiler environment
-
-If you ever need to use a custom compiler environment for the native builds on Windows, you can set the `SkipVCEnvInit` environment variable to `1`. The build system will skip discovering Visual Studio and initializing its development environment when this flag is used. This is only required for very advanced scenarios and should be used rarely.
diff --git a/docs/workflow/ci/disabling-tests.md b/docs/workflow/ci/disabling-tests.md
index 2b4253c85a6..5bb2c3ceabc 100644
--- a/docs/workflow/ci/disabling-tests.md
+++ b/docs/workflow/ci/disabling-tests.md
@@ -63,6 +63,8 @@ in the [issues.targets](../../../src/tests/issues.targets) file. Additionally, t
link to a GitHub issue in the `` element. Disabling a test here can be conditioned on processor
architecture, runtime, and operating system.
+### Disabling runtime tests (src/tests) with test configuration properties
+
However, some test configurations must be disabled by editing the `.csproj` or `.ilproj` file for the test,
and inserting a property in a ``, as follows:
diff --git a/docs/workflow/debugging/coreclr/debugging-runtime.md b/docs/workflow/debugging/coreclr/debugging-runtime.md
index e1e5d499bd3..0714c7da709 100644
--- a/docs/workflow/debugging/coreclr/debugging-runtime.md
+++ b/docs/workflow/debugging/coreclr/debugging-runtime.md
@@ -223,15 +223,19 @@ Native C++ code is not everything in our runtime. Nowadays, there are lots of st
#### Resolving Signature Validation Errors in Visual Studio
-Starting with Visual Studio 2022 version 17.5, Visual Studio will validate that the debugging libraries that shipped with the .NET Runtime are correctly signed before loading them. If they are unsigned, Visual Studio will show an error like:
+Visual Studio 2022 version 17.5 and later validates that the debugging libraries shipped with the .NET Runtime are signed before loading them. If they are unsigned, Visual Studio shows an error similar to the following:
> Unable to attach to CoreCLR. Signature validation failed for a .NET Runtime Debugger library because the file is unsigned.
>
> This error is expected if you are working with non-official releases of .NET (example: daily builds from https://github.com/dotnet/sdk). See https://aka.ms/vs/unsigned-dotnet-debugger-lib for more information.
-If the target process is using a .NET Runtime that is either from a daily build, or one that you built on your own computer, this error will show up. **NOTE**: This error should never happen for official builds of the .NET Runtime from Microsoft. So don’t disable the validation if you expect to be using a .NET Runtime supported by Microsoft.
+This error occurs if the target process is using a daily build .NET Runtime or one that you built. **NOTE**: This error never happens with [released builds of the .NET Runtime from Microsoft](https://dotnet.microsoft.com/en-us/download/dotnet). ***Don’t*** disable the validation if you are using an official release of the .NET Runtime.
-There are three ways to configure Visual Studio to disable signature validation:
-1. The [`DOTNET_ROOT` environment variable](https://learn.microsoft.com/dotnet/core/tools/dotnet-environment-variables#dotnet_root-dotnet_rootx86): if Visual Studio is started from a command prompt where `DOTNET_ROOT` is set, it will ignore unsigned .NET runtime debugger libraries which are under the `DOTNET_ROOT` directory.
-2. The `VSDebugger_ValidateDotnetDebugLibSignatures` environment variable: If you want to temporarily disable signature validation, run `set VSDebugger_ValidateDotnetDebugLibSignatures=0` in a command prompt, and start Visual Studio (devenv.exe) from this command prompt.
-3. Set the `ValidateDotnetDebugLibSignatures` registry key: To disable signature validation on a more permanent basis, you can set the VS registry key to turn it off. To do so, open a Developer Command Prompt, and run `Common7\IDE\VsRegEdit.exe set local HKCU Debugger\EngineSwitches ValidateDotnetDebugLibSignatures dword 0`
+The following approaches configure Visual Studio to disable signature validation:
+
+1. The `VSDebugger_ValidateDotnetDebugLibSignatures` environment variable:
+ * This is the easiest and recommended approach to temporarily disable signature validation.
+ * At the command line, run `set VSDebugger_ValidateDotnetDebugLibSignatures=0` and then start Visual Studio (`devenv.exe`) from the same command prompt.
+ * This setting is only valid for the Visual Studio instance that is started from the command prompt where the environment variable is set.
+1. The [`DOTNET_ROOT` environment variable](https://learn.microsoft.com/dotnet/core/tools/dotnet-environment-variables#dotnet_root-dotnet_rootx86): if Visual Studio is started from a command prompt where `DOTNET_ROOT` is set, it ignores unsigned .NET runtime debugger libraries which are under the `DOTNET_ROOT` directory.
+1. ***NOT RECOMMENDED*** Set the `ValidateDotnetDebugLibSignatures` registry key: To disable signature validation on a more permanent basis, use `VsRegEdit.exe` to set the VS registry key that turns it off. For example, open a Developer Command Prompt and run `Common7\IDE\VsRegEdit.exe set local HKCU Debugger\EngineSwitches ValidateDotnetDebugLibSignatures dword 0`
diff --git a/docs/workflow/requirements/linux-requirements.md b/docs/workflow/requirements/linux-requirements.md
index 2aa1794ef1a..9eb52936d26 100644
--- a/docs/workflow/requirements/linux-requirements.md
+++ b/docs/workflow/requirements/linux-requirements.md
@@ -1,111 +1,117 @@
-# Requirements to build dotnet/runtime on Linux
+# Requirements to Set Up the Build Environment on Linux
-* [Docker](#docker)
-* [Environment](#environment)
- * [Debian-based / Ubuntu](#debian-based--ubuntu)
- * [Additional Requirements for Cross-Building](#additional-requirements-for-cross-building)
- * [Fedora](#fedora)
- * [Gentoo](#gentoo)
+- [Using your Linux Environment](#using-your-linux-environment)
+  - [Debian/Ubuntu](#debianubuntu)
+ - [CMake on Older Versions of Ubuntu and Debian](#cmake-on-older-versions-of-ubuntu-and-debian)
+ - [Clang for WASM](#clang-for-wasm)
+ - [Additional Tools for Cross Building](#additional-tools-for-cross-building)
+ - [Fedora](#fedora)
+ - [Gentoo](#gentoo)
+- [Using Docker](#using-docker)
-This guide will walk you through the requirements to build _dotnet/runtime_ on Linux. Before building there is environment setup that needs to happen to pull in all the dependencies required by the build.
+There are two ways to build the runtime repo on *Linux*: Set up your environment in your Linux machine, or use the Docker images that are used in the official builds. This guide will cover both of these approaches. Using Docker allows you to leverage our existing images which already have an environment set up, while using your own environment grants you better flexibility on having other tools at hand you might need.
-There are two suggested ways to go about doing this. You can use the Docker images used in the official builds, or you can set up the environment yourself. The documentation will go over both ways. Using Docker allows you to leverage our existing images which already have an environment set up, while using your own environment grants you better flexibility on having other tools at hand you might need.
+**NOTE:** If you're using WSL, then follow the instructions for the distro you have installed there.
-## Docker
+## Using your Linux Environment
-Install Docker. For further installation instructions, see [here](https://docs.docker.com/install/). Details on the images used by the official builds can be found in the [Linux building instructions doc](/docs/workflow/building/coreclr/linux-instructions.md#docker-images). All the required build tools are included in the Docker images used to do the build, so no additional setup is required.
+The following sections describe the requirements for different kinds of Linux distros. Pull Requests are welcome to add documentation regarding environments and distros currently not described here.
-## Environment
+The minimum required RAM is 1GB (builds are known to fail on 512MB VMs: https://github.com/dotnet/runtime/issues/4069), although more is recommended, as the builds can take a long time otherwise.
-Below are the requirements for toolchain setup, depending on your environment. Pull Requests are welcome to address other environments.
-
-Minimum RAM required to build is 1GB. The build is known to fail on 512 MB VMs ([dotnet/runtime#4069](https://github.com/dotnet/runtime/issues/4069)).
-
-You can use this helper script to install dependencies on some platforms:
+To get started, you can use this helper script to install dependencies on some platforms, or you can install them yourself following the instructions in the next sections. If you opt to try this script, make sure to run it as `sudo` if you don't have root privileges:
```bash
sudo eng/install-native-dependencies.sh
-# or without 'sudo' if you are root
```
-### Debian-based / Ubuntu
+Note that it is always a good idea to manually double check that all the dependencies were installed correctly if you opt to use the script.
-These instructions are written assuming the current Ubuntu LTS.
+### Debian/Ubuntu
-Install the following packages for the toolchain:
+These instructions are written assuming the current *Ubuntu LTS*.
+
+The packages you need to install are shown in the following list:
-* CMake 3.20 or newer
-* llvm
-* lld
-* clang (for WASM 16 or newer)
-* build-essential
-* python-is-python3
-* curl
-* git
-* lldb
-* libicu-dev
-* liblttng-ust-dev
-* libssl-dev
-* libkrb5-dev
-* zlib1g-dev
-* ninja-build (optional, enables building native code with ninja instead of make)
-
-**NOTE**: If you have an Ubuntu version older than 22.04 LTS, or Debian version older than 12, don't install `cmake` using `apt` directly. Follow the note written down below.
+- `CMake` (version 3.20 or newer)
+- `llvm`
+- `lld`
+- `Clang` (see the [Clang for WASM](#clang-for-wasm) section if you plan on doing work on *Web Assembly (Wasm)*)
+- `build-essential`
+- `python-is-python3`
+- `curl`
+- `git`
+- `lldb`
+- `libicu-dev`
+- `liblttng-ust-dev`
+- `libssl-dev`
+- `libkrb5-dev`
+- `ninja-build` (Optional. Enables building native code using `ninja` instead of `make`)
+
+**NOTE:** If you are running on *Ubuntu* older than version *22.04 LTS*, or *Debian* older than version 12, then don't install `cmake` using `apt` directly. Follow the instructions in the [CMake on Older Versions of Ubuntu and Debian section](#cmake-on-older-versions-of-ubuntu-and-debian) later down in this doc.
```bash
sudo apt install -y cmake llvm lld clang build-essential \
python-is-python3 curl git lldb libicu-dev liblttng-ust-dev \
- libssl-dev libkrb5-dev zlib1g-dev ninja-build
+ libssl-dev libkrb5-dev ninja-build
```
-**NOTE**: As of now, Ubuntu's `apt` only has until CMake version 3.16.3 if you're using Ubuntu 20.04 LTS (less in older Ubuntu versions), and version 3.18.4 in Debian 11 (less in older Debian versions). This is lower than the required 3.20, which in turn makes it incompatible with the repo. For this case, we can use the `snap` package manager or the _Kitware APT feed_ to get a new enough version of CMake.
-**NOTE**: If you have Ubuntu 22.04 LTS and older and your `apt` does not have clang version 16, you can add `"deb http://apt.llvm.org/$(lsb_release -s -c)/ llvm-toolchain-$(lsb_release -s -c)-18 main"` repository to your `apt`. See how we do it for linux-based containers [here](./../../../.devcontainer/Dockerfile).
+#### CMake on Older Versions of Ubuntu and Debian
+
+As of now, Ubuntu's `apt` only provides *CMake* up to version 3.16.3 if you're using *Ubuntu 20.04 LTS* (less in older Ubuntu versions), and up to version 3.18.4 in *Debian 11* (less in older Debian versions). This is lower than the required 3.20, which in turn makes it incompatible with the runtime repo. To get around this, there are two options you can choose: use the `snap` package manager, which has a more recent version of *CMake*, or directly use the *Kitware APT Feed*.
-For snap:
+To use `snap`, run the following command:
```bash
sudo snap install cmake
```
-For the _Kitware APT feed_, follow its [instructions here](https://apt.kitware.com/).
+To use the *Kitware APT feed*, follow their official instructions [in this link](https://apt.kitware.com/).
-You now have all the required components.
+#### Clang for WASM
-#### Additional Requirements for Cross-Building
+As of now, *WASM* builds have a minimum requirement of `Clang` version 16 or later (version 18 is the latest at the time of writing this doc). If you're using *Ubuntu 22.04 LTS* or older, then you will have to add an additional repository to `apt` to be able to get said version. Run the following commands on your terminal to do this:
-If you are planning to use your Linux environment to do cross-building for other architectures (e.g. Arm32, Arm64) and/or other operating systems (e.g. Alpine, FreeBSD), you need to install these additional dependencies:
+```bash
+sudo add-apt-repository -y "deb http://apt.llvm.org/$(lsb_release -s -c)/ llvm-toolchain-$(lsb_release -s -c)-18 main"
+sudo apt update -y
+sudo apt install -y clang-18
+```
+
+You can also take a look at the Linux-based *Dockerfile* [over here](/.devcontainer/Dockerfile) for another example.
+
+#### Additional Tools for Cross Building
-* qemu
-* qemu-user-static
-* binfmt-support
-* debootstrap
+If you're planning to use your environment to do Linux cross-building to other architectures (e.g. Arm32, Arm64), and/or other operating systems (e.g. Alpine, FreeBSD), you'll need to install a few additional dependencies. It is worth mentioning these other packages are required to build the `crossrootfs`, which is used to effectively do the cross-compilation, not to build the runtime itself.
-**NOTE**: These dependencies are used to build the `crossrootfs`, not the runtime itself.
+- `qemu`
+- `qemu-user-static`
+- `binfmt-support`
+- `debootstrap`
### Fedora
-These instructions are written assuming Fedora 40.
+These instructions are written assuming *Fedora 40*.
Install the following packages for the toolchain:
-* cmake
-* llvm
-* lld
-* lldb
-* clang
-* python
-* curl
-* git
-* libicu-devel
-* openssl-devel
-* krb5-devel
-* zlib-devel
-* lttng-ust-devel
-* ninja-build (optional, enables building native code with ninja instead of make)
+- `cmake`
+- `llvm`
+- `lld`
+- `lldb`
+- `clang`
+- `python`
+- `curl`
+- `git`
+- `libicu-devel`
+- `openssl-devel`
+- `krb5-devel`
+- `lttng-ust-devel`
+- `ninja-build` (Optional. Enables building native code using `ninja` instead of `make`)
```bash
-sudo dnf install -y cmake llvm lld lldb clang python curl git libicu-devel openssl-devel \
- krb5-devel zlib-devel lttng-ust-devel ninja-build
+sudo dnf install -y cmake llvm lld lldb clang python curl git \
+ libicu-devel openssl-devel krb5-devel lttng-ust-devel ninja-build
```
### Gentoo
@@ -115,3 +121,9 @@ In case you have Gentoo you can run following command:
```bash
emerge --ask clang dev-util/lttng-ust app-crypt/mit-krb5
```
+
+## Using Docker
+
+As mentioned at the beginning of this doc, the other method to build the runtime repo for Linux is to use the prebuilt Docker images that our official builds use. In order to be able to run them, you first need to download and install the Docker Engine. The binaries needed and installation instructions can be found at the Docker official site [in this link](https://docs.docker.com/get-started/get-docker).
+
+Once you have the Docker Engine up and running, you can follow our docker building instructions [over here](/docs/workflow/using-docker.md).
diff --git a/docs/workflow/requirements/macos-requirements.md b/docs/workflow/requirements/macos-requirements.md
index 0eae7f1d621..e9606b12569 100644
--- a/docs/workflow/requirements/macos-requirements.md
+++ b/docs/workflow/requirements/macos-requirements.md
@@ -1,36 +1,29 @@
-# Requirements to build dotnet/runtime on macOS
+# Requirements to Set Up the Build Environment on macOS
-* [Environment](#environment)
- * [Xcode](#xcode)
- * [Toolchain Setup](#toolchain-setup)
+- [Xcode Developer Tools](#xcode-developer-tools)
+- [Toolchain Additional Dependencies](#toolchain-additional-dependencies)
-This guide will walk you through the requirements needed to build _dotnet/runtime_ on macOS. We'll start by showing how to set up your environment from scratch.
+To build the runtime repo on *macOS*, you will need to install the *Xcode* developer tools and a few other dependencies, described in the sections below.
-## Environment
+## Xcode Developer Tools
-Here are the components you will need to install and setup to work with the repo.
+- Install *Apple Xcode* developer tools from the [Mac App Store](https://apps.apple.com/app/xcode/id497799835).
+- Configure the *Xcode* command line tools. You can do this via one of these two methods:
+ - Run Xcode, open Preferences, and on the Locations tab, change `Command Line Tools` to point to this installation of _Xcode.app_. This usually comes already done by default, but it's always good to ensure.
+ - Alternately, you can run `sudo xcode-select --switch /Applications/Xcode.app/Contents/Developer` in a terminal. This command assumes your Xcode app is named `Xcode.app` as it comes by default. If you've renamed it to something else, adjust the path accordingly, then run the command.
-### Xcode
+## Toolchain Additional Dependencies
-* Install Apple Xcode developer tools from the [Mac App Store](https://apps.apple.com/us/app/xcode/id497799835).
-* Configure the Xcode command line tools:
- * Run Xcode, open Preferences, and on the Locations tab, change "Command Line Tools" to point to this installation of _Xcode.app_. This usually comes already done by default, but it's always good to ensure.
- * Alternately, you can run `sudo xcode-select --switch /Applications/Xcode.app/Contents/Developer` in a terminal (Adjust the path if you renamed _Xcode.app_).
+To build the runtime repo, you will also need to install the following dependencies:
-### Toolchain Setup
+- `CMake` 3.20 or newer
+- `icu4c`
+- `openssl@1.1` or `openssl@3`
+- `pkg-config`
+- `python3`
+- `ninja` (This one is optional. It is an alternative tool to `make` for building native code)
-Building _dotnet/runtime_ depends on several tools to be installed. You can download them individually or use [Homebrew](https://brew.sh) for easier toolchain setup.
-
-Install the following packages:
-
-* CMake 3.20 or newer
-* icu4c
-* openssl@1.1 or openssl@3
-* pkg-config
-* python3
-* ninja (optional, enables building native code with ninja instead of make)
-
-You can install all the required packages above using _Homebrew_ by running this command in the repository root:
+You can install them separately, or you can alternatively opt to install *[Homebrew](https://brew.sh/)* and use the `Brewfile` provided by the repo, which takes care of everything for you. If you go by this route, once you have *Homebrew* up and running on your machine, run the following command from the root of the repo to download and install all the necessary dependencies at once:
```bash
brew bundle --no-lock --file eng/Brewfile
diff --git a/docs/workflow/requirements/windows-requirements.md b/docs/workflow/requirements/windows-requirements.md
index ff875e7b9c5..26ba9aa187e 100644
--- a/docs/workflow/requirements/windows-requirements.md
+++ b/docs/workflow/requirements/windows-requirements.md
@@ -1,140 +1,109 @@
-# Requirements to build dotnet/runtime on Windows
+# Requirements to Set Up the Build Environment on Windows
-* [Environment](#environment)
- * [Enable Long Paths](#enable-long-paths)
- * [Visual Studio](#visual-studio)
- * [Build Tools](#build-tools)
- * [CMake](#cmake)
- * [Ninja](#ninja)
- * [Python](#python)
- * [Git](#git)
- * [PowerShell](#powershell)
- * [.NET SDK](#net-sdk)
- * [Adding to the default PATH variable](#adding-to-the-default-path-variable)
+- [Tools and Configuration](#tools-and-configuration)
+ - [Git for Windows](#git-for-windows)
+ - [Enable Long Paths](#enable-long-paths)
+ - [Visual Studio](#visual-studio)
+ - [Workloads](#workloads)
+ - [Individual Development Tools](#individual-development-tools)
+ - [Powershell](#powershell)
+ - [The .NET SDK](#the-net-sdk)
+- [Setting Environment Variables on Windows](#setting-environment-variables-on-windows)
-These instructions will lead you through the requirements to build _dotnet/runtime_ on Windows.
+To build the runtime repo on *Windows*, you will need to install *Visual Studio*, as well as certain development tools that go with it, independently of the IDE, which are described in the following sections.
-## Environment
+## Tools and Configuration
-Here are the components you will need to install and setup to work with the repo.
+### Git for Windows
-### Enable Long Paths
+- First of all, download and install [Git for Windows](https://git-scm.com/download/win) (minimum required version is 2.22.0).
+- The installer by default should add `Git` to your `PATH` environment variable, or at least have a checkbox where you can instruct it to do so. If it doesn't, or you'd prefer to set it later yourself, you can follow the instructions in the [Setting Environment Variables on Windows](#setting-environment-variables-on-windows) section of this doc.
-The runtime repository requires long paths to be enabled. Follow [the instructions provided here](https://learn.microsoft.com/windows/win32/fileio/maximum-file-path-limitation#enable-long-paths-in-windows-10-version-1607-and-later) to enable that feature.
+### Enable Long Paths
-If using Git for Windows you might need to also configure long paths there. Using an administrator terminal simply type:
+The runtime repo requires long paths to be enabled, both on Windows itself and on *Git*. To configure them on *Git*, open a terminal with administrator privileges and enter the following command:
-```cmd
+```powershell
git config --system core.longpaths true
```
-### Visual Studio
-
-Install [Visual Studio 2022](https://visualstudio.microsoft.com/downloads/). The Community edition is available free of charge. Visual Studio 2022 17.8 or later is required. Note that as we ramp up on a given release the libraries code may start using preview language features. While an older IDE may still succeed in building the projects, the IDE may report mismatched diagnostics in the Errors and Warnings window. Using the latest public preview of Visual Studio is required to ensure the IDE experience is well behaved in such scenarios.
+The reason this has to be done is that *Git for Windows* is compiled with **MSYS**, which uses a version of the Windows API that has a filepath limit of 260 characters total, as opposed to the usual limit of 4096 on macOS and Linux.
-Note that Visual Studio and the development tools described below are required, regardless of whether you plan to use the IDE or not. The installation process goes as follows:
+Next, to configure the long paths for Windows itself, follow the instructions provided [in this link](https://learn.microsoft.com/windows/win32/fileio/maximum-file-path-limitation?tabs=registry#enable-long-paths-in-windows-10-version-1607-and-later).
-* It's recommended to use **Workloads** installation approach. The following are the minimum requirements:
- * **.NET Desktop Development** with all default components,
- * **Desktop Development with C++** with all default components.
-* To build for Arm64, make sure that you have the right architecture-specific compilers installed. In the **Individual components** window, in the **Compilers, build tools, and runtimes** section:
- * For Arm64, check the box for _MSVC v143* VS 2022 C++ ARM64 build tools (Latest)_.
-* To build the tests, you will need some additional components:
- * **C++/CLI support for v143 build tools (Latest)**.
+If long paths are not enabled, you might start running into issues as soon as you try to clone the repo. Especially with libraries that have very long filenames, you might get errors like `Unable to create file: Filename too long` during the cloning process.
-A `.vsconfig` file is included in the root of the _dotnet/runtime_ repository that includes all components needed to build the _dotnet/runtime_ repository. You can [import `.vsconfig` in your Visual Studio installer](https://learn.microsoft.com/visualstudio/install/import-export-installation-configurations?view=vs-2022#import-a-configuration) to install all necessary components.
+### Visual Studio
-### Build Tools
+Download and install the [latest version of Visual Studio](https://visualstudio.microsoft.com/downloads/) (minimum version required is VS 2022 17.8). The **Community Edition** is available free of charge. Note that as we ramp up on a given release, the libraries code may start using preview language features. While older versions of the IDE may still succeed in building the projects, the IDE may report mismatched diagnostics in the *Errors and Warnings* window. Using the latest public preview of Visual Studio fixes these cases and helps ensure the IDE experience is well behaved and displays diagnostics as expected.
-These steps are required only in case the tools have not been installed as Visual Studio **Individual Components** (described above).
+Note that Visual Studio and its development tools are required, regardless of whether you plan to use the IDE or not.
-#### CMake
+#### Workloads
-* Install [CMake](https://cmake.org/download) for Windows.
-* Add its location (e.g. C:\Program Files (x86)\CMake\bin) to the PATH environment variable. The installation script has a check box to do this, but you can do it yourself after the fact following the instructions at [Adding to the Default PATH variable](#adding-to-the-default-path-variable).
+It is highly recommended to use the *Workloads* approach, as that installs the full bundles, which include all the necessary tools for the repo to work properly. Open up *Visual Studio Installer*, and click on *Modify* on the Visual Studio installation you plan to use. There, click on the *Workloads* tab (usually selected by default), and install the following bundles:
-The _dotnet/runtime_ repository requires using CMake 3.20 or newer.
+- .NET desktop development
+- Desktop development with C++
-**NOTE**: If you plan on using the `-msbuild` flag for building the repo, you will need version 3.21 at least. This is because the VS2022 generator doesn't exist in CMake until said version.
+To build the tests and do ARM32/ARM64 development, you'll need some additional components. You can find them by clicking on the *Individual components* tab in the *Visual Studio Installer*:
-#### Ninja
+- For ARM stuff: *MSVC v143 - VS 2022 C++ ARM64/ARM64EC build tools (Latest)* for Arm64, and *MSVC v143 - VS 2022 C++ ARM build tools (Latest)* for Arm32.
+- For building tests: *C++/CLI support for v143 build tools (Latest)*
-* Install Ninja in one of the three following ways
- * Ninja is included with Visual Studio. ARM64 Windows should use this method as other options are currently not available for ARM64.
- * [Download the executable](https://github.com/ninja-build/ninja/releases) and add its location to [the Default PATH variable](#adding-to-the-default-path-variable).
- * [Install via a package manager](https://github.com/ninja-build/ninja/wiki/Pre-built-Ninja-packages), which should automatically add it to the PATH environment variable.
+Alternatively, there is also a `.vsconfig` file included at the root of the runtime repo. It includes all the necessary components required, outlined in a JSON format that Visual Studio can read and parse. You can boot up Visual Studio directly and [import this `.vsconfig` file](https://learn.microsoft.com/visualstudio/install/import-export-installation-configurations?view=vs-2022#import-a-configuration) instead of installing the workloads yourself. It is worth mentioning, however, that while we are very careful in keeping this file up-to-date, sometimes it might get a tad obsolete and miss important components. So, it is always a good idea to double check that the full workloads are installed.
-#### Python
+#### Individual Development Tools
-* Install [Python](https://www.python.org/downloads/) for Windows.
-* Add its location (e.g. C:\Python*\\) to the PATH environment variable.
- The installation script has a check box to do this, but you can do it yourself after the fact following the instructions at [Adding to the Default PATH variable](#adding-to-the-default-path-variable).
+All the tools you need should've been installed by Visual Studio at this point. Some of those tools, however, may not have been installed or you might prefer installing them yourself from their own sources. The main process for this is to download their installers and follow their setup. Said installers usually also prompt you to add them automatically to your `PATH` environment variable. If you miss this option, or prefer to set them yourself later on, you can follow the instructions in the [Setting Environment Variables on Windows](#setting-environment-variables-on-windows) section of this doc.
-The _dotnet/runtime_ repository requires at least Python 3.7.4.
+Here are the links where you can download these tools:
-### Git
+- *CMake*: https://cmake.org/download (minimum required version is 3.20)
+- *Ninja*: https://github.com/ninja-build/ninja/releases (latest version is most recommended)
+- *Python*: https://www.python.org/downloads/windows (minimum required version is 3.7.4)
-* Install [Git](https://git-for-windows.github.io/) for Windows.
-* Add its location (e.g. C:\Program Files\Git\cmd) to the PATH environment variable.
- The installation script has a check box to do this, but you can do it yourself after the fact following the instructions at [Adding to the Default PATH variable](#adding-to-the-default-path-variable).
+**NOTE:** If you plan on using *MSBuild* instead of *Ninja* to build the native components, then the minimum required CMake version is 3.21 instead. This is because the VS2022 generator doesn't exist in CMake until said version.
-The _dotnet/runtime_ repository requires at least Git 2.22.0.
+### Powershell
-### PowerShell
+The runtime repo also uses some `powershell` scripts as part of the Windows builds, so ensure it is accessible via your `PATH` environment variable. It is located in `%SYSTEMROOT%\System32\WindowsPowerShell\v1.0` and should be all set since you first installed Windows, but it never hurts to double check.
-* Ensure that `powershell.exe` is accessible via the PATH environment variable. Typically this is `%SYSTEMROOT%\System32\WindowsPowerShell\v1.0\` and its automatically set upon Windows installation.
-* Powershell version must be 3.0 or higher. Use `$PSVersionTable.PSVersion` to determine the engine version.
+
+The minimum required version is 3.0, and your Windows installation should have it. You can verify this by checking the `$PSVersionTable.PSVersion` variable in a Powershell terminal.
-### .NET SDK
+### The .NET SDK
-While not strictly needed to build or test this repository, having the .NET SDK installed lets you browse solution files in this repository with Visual Studio and use the `dotnet.exe` command to run .NET applications in the 'normal' way.
+While not strictly needed to build or test this repository, having the .NET SDK installed lets you browse solution files in the codebase with Visual Studio and use the `dotnet.exe` command to build and run .NET applications in the 'normal' way.
-We use this in the [build testing with the installed SDK](/docs/workflow/testing/using-your-build-with-installed-sdk.md), and [build testing with dev shipping packages](/docs/workflow/testing/using-dev-shipping-packages.md) instructions. The minimum required version of the SDK is specified in the [global.json file](https://github.com/dotnet/runtime/blob/main/global.json#L3). You can find the installers and binaries for latest development builds of .NET SDK in the [sdk repo](https://github.com/dotnet/sdk#installing-the-sdk).
+We use this in the [build testing with the installed SDK](/docs/workflow/testing/using-your-build-with-installed-sdk.md), and [build testing with dev shipping packages](/docs/workflow/testing/using-dev-shipping-packages.md) instructions. The minimum required version of the SDK is specified in the [global.json file](https://github.com/dotnet/runtime/blob/main/global.json#L3). You can find the nightly installers and binaries for the latest development builds over in the [SDK repo](https://github.com/dotnet/sdk#installing-the-sdk).
-Alternatively, to avoid modifying your machine state, you can use the repository's locally acquired SDK by passing in the solution to load via the `-vs` switch. For example:
+Alternatively, if you would rather avoid modifying your machine state, you can use the repository's locally acquired SDK by passing in the solution to load via the `-vs` switch. For example:
```cmd
.\build.cmd -vs System.Text.RegularExpressions
```
-This will set the `DOTNET_ROOT` and `PATH` environment variables to point to the locally acquired SDK under `runtime\.dotnet` and will launch the Visual Studio instance that is registered for the `sln` extension.
+This will set the `DOTNET_ROOT` and `PATH` environment variables to point to the locally acquired SDK under the `.dotnet` directory found at the root of the repo for the duration of this terminal session. Then, it will launch the Visual Studio instance that is registered for the `.sln` extension, and open the solution you passed as argument to the command-line.
-### Adding to the default PATH variable
+## Setting Environment Variables on Windows
-The commands above need to be on your command lookup path. Some installers will automatically add them to the path as part of the installation, but if not, here is how you can do it.
+As mentioned in the sections above, the commands that run the development tools have to be in your `PATH` environment variable. Their installers usually have the option to do it automatically for you enabled by default, but if for any reason you need to set them yourself, here is how you can do it. There are two options. You can make them last only for that terminal instance, or you can set them directly on the system to make them permanent.
-You can also temporarily add a directory to the PATH environment variable with the command-prompt syntax `set PATH=%PATH%;DIRECTORY_TO_ADD_TO_PATH`. If you're working with Powershell, then the syntax would be `$Env:PATH += ";DIRECTORY_TO_ADD_TO_PATH"`. However, this change will only last until the command windows close.
+**Temporary for the Duration of the Terminal Session**
-You can make your change to the PATH variable persistent by going to _Control Panel -> System And Security -> System -> Advanced system settings -> Environment Variables_, and select the `Path` variable under `System Variables` (if you want to change it for all users) or `User Variables` (if you only want to change it for the current user).
+If you're on *Command Prompt*, issue the following command:
-Simply edit the PATH variable's value and add the directory (with a semicolon separator).
-
-### Windows on Arm64
-
-The Windows on Arm64 development experience has improved substantially over the last few years, however there are still a few steps you should take to improve performance when developing dotnet/runtime on an ARM device.
-
-During preview releases, the repo sources its compilers from the [Microsoft.NET.Compilers.Toolset](https://www.nuget.org/packages/Microsoft.Net.Compilers.Toolset/) package whose bits aren't configured for the ARM64 build of .NET framework. This can result in [suboptimal performance](https://github.com/dotnet/runtime/issues/104548) when working on libraries in Visual Studio. The issue can be worked around by [configuring the registry](https://github.com/dotnet/runtime/issues/104548#issuecomment-2214581797) to run the compiler as Arm64 processes. The proper fix that will make this workaround unnecessary is being worked on in [this PR](https://github.com/dotnet/roslyn/pull/74285).
+```cmd
+set PATH=%PATH%;DIRECTORY_TO_ADD_TO_PATH
+```
-Using an Administrator Powershell prompt run the script:
+If you're on *Powershell*, then the command looks like this:
```powershell
-function SetPreferredMachineToArm64($imageName)
-{
- $RegistryPath = "HKLM:\SOFTWARE\Microsoft\Windows NT\CurrentVersion\Image File Execution Options\${imageName}"
- $Name = "PreferredMachine"
- $Value = [convert]::ToInt32("aa64", 16)
-
- # Create the key if it does not exist
- If (-NOT (Test-Path $RegistryPath)) {
- New-Item -Path $RegistryPath -Force | Out-Null
- }
-
- # Now set the value
- New-ItemProperty -Path $RegistryPath -Name $Name -Value $Value -PropertyType DWORD -Force
-}
-
-SetPreferredMachineToArm64('csc.exe')
-SetPreferredMachineToArm64('VBCSCompiler.exe')
+$Env:PATH += ";DIRECTORY_TO_ADD_TO_PATH"
```
-Then restart any open Visual Studio applications.
+**Permanently on the System**
+
+To make your environment variable changes persistent, open *Control Panel*. There, click on *System and Security* -> *System* -> *Advanced System Settings* -> *Environment Variables*. You'll notice there are two `PATH` environment variables: one under `User Variables`, and one under `System Variables`. If you want to make the changes persistent only for your current user, edit the former; if you want them to apply to all accounts on that machine, edit the latter.
diff --git a/docs/workflow/testing/coreclr/testing.md b/docs/workflow/testing/coreclr/testing.md
index 46de9e02231..fe8d69d7a86 100644
--- a/docs/workflow/testing/coreclr/testing.md
+++ b/docs/workflow/testing/coreclr/testing.md
@@ -136,7 +136,7 @@ On macOS and Linux:
We have multiple different mechanisms of executing tests.
-Our test entrypoints are generally what we call "merged test runners", as they provide an executable runner project for multiple different test assemblies. These projects can be identified by the `` line in their .csproj file. These projects provide a simple experience for running tests. When executing a merged runner project, it will run each test sequentially and record if it passes or fails in an xunit results file. The merged test runner support runtime test filtering. If specified, the first argument to the test runner is treated as a `dotnet test --filter` argument following the xUnit rules in their documentation. Today, the runner only supports the simple form, a substring of a test's fully-qualified name, in the format `Namespace.ContainingTypeName.TypeName.Method`. If support for further filtering options is desired, please open an issue requesting it.
+Our test entrypoints are generally what we call "merged test runners", as they provide an executable runner project for multiple different test assemblies. These projects can be identified by the `` line in their .csproj file. These projects provide a simple experience for running tests. When executing a merged runner project, it will run each test sequentially and record if it passes or fails in an xunit results file. The merged test runner support runtime test filtering. If specified, the first argument to the test runner is treated as a `dotnet test --filter` argument following the xUnit rules in their documentation. Today, the runner only supports the simple form, a substring of a test's fully-qualified name, in the format `Namespace.ContainingTypeName.TypeName.Method`. Additionally, tests can be filtered using the `FullyQualifiedName=Namespace.ContainingTypeName.TypeName.Method` syntax or the `DisplayName=TestDisplayName` syntax. The display name of a test is the name printed out on the console when the test runs. Additionally, a `~` can be used instead of an `=` to specify a substring search. If support for further filtering options is desired, please open an issue requesting it.
Some tests need to be run in their own process as they interact with global process state, they have a custom test entrypoint, or they interact poorly with other tests in the same process. These tests are generally marked with `true` in their project files. These tests can be run directly, but they can also be invoked through their corresponding merged test runner. The merged test runner will invoke them as a subprocess in the same manner as if they were run individually.
@@ -150,6 +150,35 @@ To filter tests on a merged test runner built as standalone, you can set the `Te
If you wish to use the Standalone runner described in the [previous section](#the-standalone-test-runner-and-build-time-test-filtering), you can set the `BuildAllTestsAsStandalone` environment variable to `true` when invoking the `./src/tests/build.sh` or `./src/tests/build.cmd` scripts (for example, `export BuildAllTestsAsStandalone=true` or `set BuildAllTestsAsStandalone=true`). This will build all tests that are not directly in a merged test runner's project as separate executable tests and build only the tests that are compiled into the runner directly. If a runner has no tests that are built directly into the runner, then it will be excluded.
+### I added a test, which project do I run to run it?
+
+Now that we run multiple tests in a single process, determining which project corresponds to the test to run can be a bit tricky. Here's some basic steps to determine which project to run to execute a test:
+
+1. Look at the project file.
+
+If the project file has `true` or ``, then to run the test, you should build this project and run the `.cmd` or `.sh` script that corresponds to this project file.
+
+2. Look at .csproj files in parent directories.
+
+In a parent directory, you will find a `.csproj` file marked with ``. In that project file, you'll see one or more `MergedTestProjectReference` items. If one of the glob patterns in the `Include` attribute matches and none of the patterns in the `Exclude` attribute on the same item match, then the test is included in this merged test runner. To run the test, you should build this merged runner project and run the `.cmd` or `.sh` script that corresponds to this project file. You can filter the tests in this runner using the instructions in the [Test Executors section](#test-executors).
+
+### When to make a test RequiresProcessIsolation
+
+The following are common reasons to mark a test as requiring process isolation:
+
+- The test manipulates process-wide state, such as setting environment variables or changing the current directory.
+- The test requires the ability to parse command line arguments.
+- The test needs a custom main method.
+- The test requires special information, such as an app manifest, in its executable.
+- The test launches through a native executable.
+- The test sets one of the configuration properties that are checked in the test run scripts, such as those in [test-configuration.md](test-configuration.md#adding-test-guidelines).
+
+When a test is marked with `<RequiresProcessIsolation>true</RequiresProcessIsolation>`, it will be run in its own process and have its own `.cmd` and `.sh` scripts generated as test entrypoints. In CI, it will be executed out of process by whichever merged test runner it is referenced by.
+
+#### Main methods in RequiresProcessIsolation tests
+
+If a custom main is not provided, the test can use `[Fact]` and `[Theory]` attributes internally. The test will use the "standalone" generator to create the test entrypoint. If you want to provide your own `Main` method for your test, set the `false` property in the test project file.
+
### Building C++/CLI Native Test Components Against the Live Ref Assemblies
By default, the _C++/CLI_ native test components build against the _ref pack_ from the SDK specified in the `global.json` file in the root of the repository. To build these components against the _ref assemblies_ produced in the build, pass the `-cmakeargs -DCPP_CLI_LIVE_REF_ASSEMBLIES=1` parameters to the test build. For example:
diff --git a/docs/workflow/trimming/feature-switches.md b/docs/workflow/trimming/feature-switches.md
index 92d04e897a2..76714d95f2c 100644
--- a/docs/workflow/trimming/feature-switches.md
+++ b/docs/workflow/trimming/feature-switches.md
@@ -14,6 +14,7 @@ configurations but their defaults might vary as any SDK can set the defaults dif
| VerifyDependencyInjectionOpenGenericServiceTrimmability | Microsoft.Extensions.DependencyInjection.VerifyOpenGenericServiceTrimmability | When set to true, DependencyInjection will verify trimming annotations applied to open generic services are correct. |
| _AggressiveAttributeTrimming | System.AggressiveAttributeTrimming | When set to true, aggressively trims attributes to allow for the most size savings possible, even if it could result in runtime behavior changes |
| _ComObjectDescriptorSupport | System.ComponentModel.TypeDescriptor.IsComObjectDescriptorSupported | When set to true, supports creating a TypeDescriptor based view of COM objects. |
+| _DataSetXmlSerializationSupport | System.Data.DataSet.XmlSerializationIsSupported | When set to false, DataSet implementation of IXmlSerializable will throw instead of using trim-incompatible XML serialization. |
| _DefaultValueAttributeSupport | System.ComponentModel.DefaultValueAttribute.IsSupported | When set to true, supports creating a DefaultValueAttribute at runtime. |
| _DesignerHostSupport | System.ComponentModel.Design.IDesignerHost.IsSupported | When set to true, supports creating design components at runtime. |
| _EnableConsumingManagedCodeFromNativeHosting | System.Runtime.InteropServices.EnableConsumingManagedCodeFromNativeHosting | Getting a managed function from native hosting is disabled when set to false and related functionality can be trimmed. |
diff --git a/docs/workflow/using-docker.md b/docs/workflow/using-docker.md
new file mode 100644
index 00000000000..6f3ca0912d4
--- /dev/null
+++ b/docs/workflow/using-docker.md
@@ -0,0 +1,79 @@
+# Using Docker for your Workflow
+
+- [Docker Basics](#docker-basics)
+- [The Official Runtime Docker Images](#the-official-runtime-docker-images)
+- [Build the Repo](#build-the-repo)
+
+This doc will cover the usage of Docker images and containers for your builds.
+
+## Docker Basics
+
+First, you have to enable and install the Docker Engine. Follow the instructions on the official Docker site at [this link](https://docs.docker.com/get-started/get-docker) if you haven't done so.
+
+When using Docker, your machine's OS is strictly speaking not terribly important. For example, if you are on *Ubuntu 22.04*, you can use the *Ubuntu 18.04* image without any issues whatsoever. Likewise, you can run Linux images on Windows if you have WSL enabled. If you followed the instructions from the Docker official website when installing the engine, you most likely have it already up and running. If not, you can follow the instructions in [this link](https://learn.microsoft.com/windows/wsl/install) to enable it. However, note that you can't run multiple OS's on the same *Docker Daemon*, as it takes resources from the underlying kernel as needed. In other words, you can run either Linux on WSL, or Windows containers. You have to switch between them if you need both, and restart Docker.
+
+The target architecture is more important to consider when using Docker containers. The image's architecture has to match your machine's supported platforms. For instance, you can run both x64 and Arm64 images on an *Apple Silicon Mac*, thanks to the *Rosetta* x64 emulator it provides. Likewise, you can run Linux Arm32 images on a Linux Arm64 host.
+
+Note that while Docker uses WSL to run the Linux containers on Windows, you don't have to boot up a WSL terminal to run them. Any `cmd` or `powershell` terminal with the `docker` command available will suffice to run all the commands. Docker takes care of the rest.
+
+## The Official Runtime Docker Images
+
+In the following tables, you will find the full names with tags of the images used for the official builds.
+
+**Main Docker Images**
+
+The main Docker images are the most commonly used ones, and the ones you will probably need for your builds. If you are working with more specific scenarios (e.g. Android, Risc-V), then you will find the images you need in the *Extended Docker Images* table right below this one.
+
+| Host OS | Target OS | Target Arch | Image | crossrootfs dir |
+| ----------------- | ------------ | --------------- | -------------------------------------------------------------------------------------- | -------------------- |
+| Azure Linux (x64) | Alpine 3.13 | x64 | `mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-net9.0-cross-amd64-alpine` | `/crossrootfs/x64` |
+| Azure Linux (x64) | Ubuntu 16.04 | x64 | `mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-net9.0-cross-amd64` | `/crossrootfs/x64` |
+| Azure Linux (x64) | Alpine 3.13 | Arm32 (armhf) | `mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-net9.0-cross-arm-alpine` | `/crossrootfs/arm` |
+| Azure Linux (x64) | Ubuntu 22.04 | Arm32 (armhf) | `mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-net9.0-cross-arm` | `/crossrootfs/arm` |
+| Azure Linux (x64) | Alpine 3.13 | Arm64 (arm64v8) | `mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-net9.0-cross-arm64-alpine` | `/crossrootfs/arm64` |
+| Azure Linux (x64) | Ubuntu 16.04 | Arm64 (arm64v8) | `mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-net9.0-cross-arm64` | `/crossrootfs/arm64` |
+| Azure Linux (x64) | Ubuntu 16.04 | x86 | `mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-net9.0-cross-x86` | `/crossrootfs/x86` |
+
+**Extended Docker Images**
+
+| Host OS | Target OS | Target Arch | Image | crossrootfs dir |
+| ----------------- | -------------------------- | ------------- | --------------------------------------------------------------------------------------- | ---------------------- |
+| Azure Linux (x64) | Android Bionic | x64 | `mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-net9.0-cross-android-amd64` | *N/A* |
+| Azure Linux (x64) | Android Bionic (w/OpenSSL) | x64 | `mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-net9.0-android-openssl` | *N/A* |
+| Azure Linux (x64) | Android Bionic (w/Docker) | x64 | `mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-net9.0-android-docker` | *N/A* |
+| Azure Linux (x64) | Azure Linux 3.0 | x64 | `mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-net9.0-fpm` | *N/A* |
+| Azure Linux (x64) | FreeBSD 13 | x64 | `mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-net9.0-cross-freebsd-13` | `/crossrootfs/x64` |
+| Azure Linux (x64) | Ubuntu 18.04 | PPC64le | `mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-net9.0-cross-ppc64le` | `/crossrootfs/ppc64le` |
+| Azure Linux (x64) | Ubuntu 24.04 | RISC-V | `mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-net9.0-cross-riscv64` | `/crossrootfs/riscv64` |
+| Azure Linux (x64) | Ubuntu 18.04 | S390x | `mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-net9.0-cross-s390x` | `/crossrootfs/s390x` |
+| Azure Linux (x64) | Ubuntu 16.04 (Wasm) | x64 | `mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-net9.0-webassembly-amd64` | `/crossrootfs/x64` |
+| Debian (x64) | Debian 12 | x64 | `mcr.microsoft.com/dotnet-buildtools/prereqs:debian-12-gcc14-amd64` | *N/A* |
+| Ubuntu (x64)* | Ubuntu 22.04 | x64 | `mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-22.04-debpkg` | *N/A* |
+| Ubuntu (x64) | Tizen 9.0 | Arm32 (armel) | `mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-22.04-cross-armel-tizen` | `/crossrootfs/armel` |
+| Ubuntu (x64) | Ubuntu 20.04 | Arm32 (v6) | `mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-20.04-cross-armv6-raspbian-10` | `/crossrootfs/armv6` |
+
+**NOTE:** The Ubuntu image marked with an * in the table above is only used for producing *deb* packages, but not for building any product code.
+
+## Build the Repo
+
+Once you've chosen the image that suits your needs, you can issue `docker run` with the necessary arguments to use your clone of the runtime repo, and call the build scripts as you need. Down below, we have a small command-line example, explaining each of the flags you might need to use:
+
+```bash
+docker run --rm \
+  -v <RUNTIME_REPO_PATH>:/runtime \
+  -w /runtime \
+  mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-net9.0-cross-amd64 \
+  ./build.sh --subset clr --configuration Checked
+```
+
+Now, dissecting the command:
+
+- `--rm`: Erase the created container after it finishes running.
+- `-v <RUNTIME_REPO_PATH>:/runtime`: Mount the runtime repo clone located in `<RUNTIME_REPO_PATH>` to the container path `/runtime`.
+- `-w /runtime`: Start the container in the `/runtime` directory.
+- `mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-net9.0-cross-amd64`: The fully qualified name of the Docker image to download. In this case, we want to use an *Azure Linux* image to target the *x64* architecture.
+- `./build.sh --subset clr --configuration Checked`: The build command to run in the repo. In this case, we want to build the *Clr* subset in the *Checked* configuration.
+
+You might also want to interact with the container directly for a myriad of reasons, like running multiple builds in different paths for example. In this case, instead of passing the build script command to the `docker` command-line, pass the flag `-it`. When you do this, you will get access to a small shell within the container, which allows you to explore it, run builds manually, and so on, like you would on a regular terminal in your machine. Note that the container shell's built-in tools are very limited in comparison to the ones you probably have on your machine, so don't expect to be able to do full work there.
+
+To do cross-building using Docker, make sure to select the appropriate image that targets the platform you want to build for. As for the commands to run, follow the same instructions from the cross-building doc [over here](/docs/workflow/building/coreclr/cross-building.md), with the difference that you don't need to generate the *ROOTFS*, as the cross-building images already include it.
diff --git a/eng/DotNetBuild.props b/eng/DotNetBuild.props
index 3569781b9af..ce13d6ca5b0 100644
--- a/eng/DotNetBuild.props
+++ b/eng/DotNetBuild.props
@@ -88,7 +88,7 @@
+$(UseSystemLibs)+
- $(InnerBuildArgs) --cmakeargs -DCLR_CMAKE_USE_SYSTEM_BROTLI=true
+ $(InnerBuildArgs) --cmakeargs -DCLR_CMAKE_USE_SYSTEM_BROTLI=true$(InnerBuildArgs) --cmakeargs -DCLR_CMAKE_USE_SYSTEM_LIBUNWIND=true
diff --git a/eng/SourceBuildPrebuiltBaseline.xml b/eng/SourceBuildPrebuiltBaseline.xml
index 4e366ddb469..4185d755fa6 100644
--- a/eng/SourceBuildPrebuiltBaseline.xml
+++ b/eng/SourceBuildPrebuiltBaseline.xml
@@ -6,6 +6,7 @@
+
diff --git a/eng/Subsets.props b/eng/Subsets.props
index fa12195a7b5..d36d18894d9 100644
--- a/eng/Subsets.props
+++ b/eng/Subsets.props
@@ -204,8 +204,6 @@
-
-
@@ -498,20 +496,16 @@
-
+
- %(AdditionalProperties);RefOnly=true
-
+ '$(BuildAllConfigurations)' == 'true'" />
-
-
- %(AdditionalProperties);RefOnly=true
-
+
+
@@ -614,6 +608,7 @@
%(AdditionalProperties);Configuration=$(ToolsConfiguration)
+ %(AdditionalProperties);RuntimeConfiguration=$(RuntimeConfiguration)%(AdditionalProperties);LibrariesConfiguration=$(LibrariesConfiguration)%(AdditionalProperties);HostConfiguration=$(HostConfiguration)%(AdditionalProperties);TasksConfiguration=$(TasksConfiguration)
diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml
index c11cd27877a..7ba1021c032 100644
--- a/eng/Version.Details.xml
+++ b/eng/Version.Details.xml
@@ -1,8 +1,8 @@
-
+ https://github.com/dotnet/icu
- 99e6c98ad1faf55e6335ab768dab5917b456a87f
+ 02ededbbc13185818d31bdae9777a12710a80d23https://github.com/dotnet/msquic
@@ -12,37 +12,37 @@
https://github.com/dotnet/wcf7f504aabb1988e9a093c1e74d8040bd52feb2f01
-
+ https://github.com/dotnet/llvm-project
- 3358dfd351b424698f3f2cd67432dc62c333a64d
+ b9b4464b3b10c1961ed0ff39b5f33b3b3bbf62d1
-
+ https://github.com/dotnet/llvm-project
- 3358dfd351b424698f3f2cd67432dc62c333a64d
+ b9b4464b3b10c1961ed0ff39b5f33b3b3bbf62d1
-
+ https://github.com/dotnet/llvm-project
- 3358dfd351b424698f3f2cd67432dc62c333a64d
+ b9b4464b3b10c1961ed0ff39b5f33b3b3bbf62d1
-
+ https://github.com/dotnet/llvm-project
- 3358dfd351b424698f3f2cd67432dc62c333a64d
+ b9b4464b3b10c1961ed0ff39b5f33b3b3bbf62d1
-
+ https://github.com/dotnet/llvm-project
- 3358dfd351b424698f3f2cd67432dc62c333a64d
+ b9b4464b3b10c1961ed0ff39b5f33b3b3bbf62d1
-
+ https://github.com/dotnet/llvm-project
- 3358dfd351b424698f3f2cd67432dc62c333a64d
+ b9b4464b3b10c1961ed0ff39b5f33b3b3bbf62d1
-
+ https://github.com/dotnet/llvm-project
- 3358dfd351b424698f3f2cd67432dc62c333a64d
+ b9b4464b3b10c1961ed0ff39b5f33b3b3bbf62d1
-
+ https://github.com/dotnet/llvm-project
- 3358dfd351b424698f3f2cd67432dc62c333a64d
+ b9b4464b3b10c1961ed0ff39b5f33b3b3bbf62d1https://github.com/dotnet/command-line-api
@@ -54,319 +54,315 @@
803d8598f98fb4efd94604b32627ee9407f246db
-
+ https://github.com/dotnet/cecil
- b9d928a9d65ed39b9257846e1b8e853cea609c00
+ 1541df9c44ff8da964b2946e18655c2e37e4a198
-
+ https://github.com/dotnet/cecil
- b9d928a9d65ed39b9257846e1b8e853cea609c00
+ 1541df9c44ff8da964b2946e18655c2e37e4a198
-
+ https://github.com/dotnet/emsdk
- edf3e90fa25b1fc4f7f63ceb45ef70f49c6b121a
+ 530feeca3813957c21b096ac371a1cf98ac11b76
-
+ https://github.com/dotnet/emsdk
- edf3e90fa25b1fc4f7f63ceb45ef70f49c6b121a
+ 530feeca3813957c21b096ac371a1cf98ac11b76
-
+ https://github.com/dotnet/source-build-reference-packages
- 26ae006620fb4518276b494efac55dbcd8f80224
+ bdd698774daa248301c236f09b97015610ca2842
-
+ https://github.com/dotnet/source-build-externals
- 51b029e3272f35af0af337823cd122725f316c69
+ 2a2ecbfbee3f87786d9de50e886a72773f0ba449
-
+ https://github.com/dotnet/arcade
- 60ae233c3d77f11c5fdb53e570b64d503b13ba59
+ 34138f08e2c9c3c41a0ac8af583a57ea953e3821
-
+ https://github.com/dotnet/arcade
- 60ae233c3d77f11c5fdb53e570b64d503b13ba59
+ 34138f08e2c9c3c41a0ac8af583a57ea953e3821
-
+ https://github.com/dotnet/arcade
- 60ae233c3d77f11c5fdb53e570b64d503b13ba59
+ 34138f08e2c9c3c41a0ac8af583a57ea953e3821
-
+ https://github.com/dotnet/arcade
- 60ae233c3d77f11c5fdb53e570b64d503b13ba59
+ 34138f08e2c9c3c41a0ac8af583a57ea953e3821
-
+ https://github.com/dotnet/arcade
- 60ae233c3d77f11c5fdb53e570b64d503b13ba59
+ 34138f08e2c9c3c41a0ac8af583a57ea953e3821
-
+ https://github.com/dotnet/arcade
- 60ae233c3d77f11c5fdb53e570b64d503b13ba59
+ 34138f08e2c9c3c41a0ac8af583a57ea953e3821
-
+ https://github.com/dotnet/arcade
- 60ae233c3d77f11c5fdb53e570b64d503b13ba59
+ 34138f08e2c9c3c41a0ac8af583a57ea953e3821
-
+ https://github.com/dotnet/arcade
- 60ae233c3d77f11c5fdb53e570b64d503b13ba59
+ 34138f08e2c9c3c41a0ac8af583a57ea953e3821
-
+ https://github.com/dotnet/arcade
- 60ae233c3d77f11c5fdb53e570b64d503b13ba59
+ 34138f08e2c9c3c41a0ac8af583a57ea953e3821
-
+ https://github.com/dotnet/arcade
- 60ae233c3d77f11c5fdb53e570b64d503b13ba59
+ 34138f08e2c9c3c41a0ac8af583a57ea953e3821
-
+ https://github.com/dotnet/arcade
- 60ae233c3d77f11c5fdb53e570b64d503b13ba59
+ 34138f08e2c9c3c41a0ac8af583a57ea953e3821
-
+ https://github.com/dotnet/arcade
- 60ae233c3d77f11c5fdb53e570b64d503b13ba59
+ 34138f08e2c9c3c41a0ac8af583a57ea953e3821
-
+ https://github.com/dotnet/arcade
- 60ae233c3d77f11c5fdb53e570b64d503b13ba59
+ 34138f08e2c9c3c41a0ac8af583a57ea953e3821
-
+ https://github.com/dotnet/arcade
- 60ae233c3d77f11c5fdb53e570b64d503b13ba59
+ 34138f08e2c9c3c41a0ac8af583a57ea953e3821
-
+ https://github.com/dotnet/arcade
- 60ae233c3d77f11c5fdb53e570b64d503b13ba59
+ 34138f08e2c9c3c41a0ac8af583a57ea953e3821
-
+ https://github.com/dotnet/arcade
- 60ae233c3d77f11c5fdb53e570b64d503b13ba59
+ 34138f08e2c9c3c41a0ac8af583a57ea953e3821
-
+ https://github.com/dotnet/arcade
- 60ae233c3d77f11c5fdb53e570b64d503b13ba59
+ 34138f08e2c9c3c41a0ac8af583a57ea953e3821
-
+ https://github.com/dotnet/arcade
- 60ae233c3d77f11c5fdb53e570b64d503b13ba59
+ 34138f08e2c9c3c41a0ac8af583a57ea953e3821
-
+ https://github.com/dotnet/arcade
- 60ae233c3d77f11c5fdb53e570b64d503b13ba59
+ 34138f08e2c9c3c41a0ac8af583a57ea953e3821
-
+ https://github.com/dotnet/arcade
- 60ae233c3d77f11c5fdb53e570b64d503b13ba59
+ 34138f08e2c9c3c41a0ac8af583a57ea953e3821
-
+ https://github.com/dotnet/runtime-assets
- 3ed40d4f2335532ee7b04f8449468c52b4e2a643
+ 928df39794e0f6677b50d6d8f690c52880c58d13
-
+ https://github.com/dotnet/runtime-assets
- 3ed40d4f2335532ee7b04f8449468c52b4e2a643
+ 928df39794e0f6677b50d6d8f690c52880c58d13
-
+ https://github.com/dotnet/runtime-assets
- 3ed40d4f2335532ee7b04f8449468c52b4e2a643
+ 928df39794e0f6677b50d6d8f690c52880c58d13
-
+ https://github.com/dotnet/runtime-assets
- 3ed40d4f2335532ee7b04f8449468c52b4e2a643
+ 928df39794e0f6677b50d6d8f690c52880c58d13
-
+ https://github.com/dotnet/runtime-assets
- 3ed40d4f2335532ee7b04f8449468c52b4e2a643
+ 928df39794e0f6677b50d6d8f690c52880c58d13
-
+ https://github.com/dotnet/runtime-assets
- 3ed40d4f2335532ee7b04f8449468c52b4e2a643
+ 928df39794e0f6677b50d6d8f690c52880c58d13
-
+ https://github.com/dotnet/runtime-assets
- 3ed40d4f2335532ee7b04f8449468c52b4e2a643
+ 928df39794e0f6677b50d6d8f690c52880c58d13
-
+ https://github.com/dotnet/runtime-assets
- 3ed40d4f2335532ee7b04f8449468c52b4e2a643
+ 928df39794e0f6677b50d6d8f690c52880c58d13
-
+ https://github.com/dotnet/runtime-assets
- 3ed40d4f2335532ee7b04f8449468c52b4e2a643
+ 928df39794e0f6677b50d6d8f690c52880c58d13
-
+ https://github.com/dotnet/runtime-assets
- 3ed40d4f2335532ee7b04f8449468c52b4e2a643
+ 928df39794e0f6677b50d6d8f690c52880c58d13
-
+ https://github.com/dotnet/runtime-assets
- 3ed40d4f2335532ee7b04f8449468c52b4e2a643
+ 928df39794e0f6677b50d6d8f690c52880c58d13
-
+ https://github.com/dotnet/runtime-assets
- 3ed40d4f2335532ee7b04f8449468c52b4e2a643
+ 928df39794e0f6677b50d6d8f690c52880c58d13
-
+ https://github.com/dotnet/runtime-assets
- 3ed40d4f2335532ee7b04f8449468c52b4e2a643
+ 928df39794e0f6677b50d6d8f690c52880c58d13
-
+ https://github.com/dotnet/llvm-project
- 3358dfd351b424698f3f2cd67432dc62c333a64d
+ b9b4464b3b10c1961ed0ff39b5f33b3b3bbf62d1
-
+ https://github.com/dotnet/llvm-project
- 3358dfd351b424698f3f2cd67432dc62c333a64d
+ b9b4464b3b10c1961ed0ff39b5f33b3b3bbf62d1
-
+ https://github.com/dotnet/llvm-project
- 3358dfd351b424698f3f2cd67432dc62c333a64d
+ b9b4464b3b10c1961ed0ff39b5f33b3b3bbf62d1
-
+ https://github.com/dotnet/llvm-project
- 3358dfd351b424698f3f2cd67432dc62c333a64d
+ b9b4464b3b10c1961ed0ff39b5f33b3b3bbf62d1
-
+ https://github.com/dotnet/llvm-project
- 3358dfd351b424698f3f2cd67432dc62c333a64d
+ b9b4464b3b10c1961ed0ff39b5f33b3b3bbf62d1
-
+ https://github.com/dotnet/llvm-project
- 3358dfd351b424698f3f2cd67432dc62c333a64d
+ b9b4464b3b10c1961ed0ff39b5f33b3b3bbf62d1
-
+ https://github.com/dotnet/llvm-project
- 3358dfd351b424698f3f2cd67432dc62c333a64d
+ b9b4464b3b10c1961ed0ff39b5f33b3b3bbf62d1
-
+ https://github.com/dotnet/llvm-project
- 3358dfd351b424698f3f2cd67432dc62c333a64d
+ b9b4464b3b10c1961ed0ff39b5f33b3b3bbf62d1
-
+ https://github.com/dotnet/llvm-project
- 3358dfd351b424698f3f2cd67432dc62c333a64d
+ b9b4464b3b10c1961ed0ff39b5f33b3b3bbf62d1
-
+ https://github.com/dotnet/llvm-project
- 3358dfd351b424698f3f2cd67432dc62c333a64d
+ b9b4464b3b10c1961ed0ff39b5f33b3b3bbf62d1
-
+ https://github.com/dotnet/llvm-project
- 3358dfd351b424698f3f2cd67432dc62c333a64d
+ b9b4464b3b10c1961ed0ff39b5f33b3b3bbf62d1
-
+ https://github.com/dotnet/llvm-project
- 3358dfd351b424698f3f2cd67432dc62c333a64d
+ b9b4464b3b10c1961ed0ff39b5f33b3b3bbf62d1
-
+ https://github.com/dotnet/llvm-project
- 3358dfd351b424698f3f2cd67432dc62c333a64d
+ b9b4464b3b10c1961ed0ff39b5f33b3b3bbf62d1
-
+ https://github.com/dotnet/llvm-project
- 3358dfd351b424698f3f2cd67432dc62c333a64d
+ b9b4464b3b10c1961ed0ff39b5f33b3b3bbf62d1
-
+ https://github.com/dotnet/runtime
- 1cc0186c3e120ee4ed0494cf74fef0a3ef0118d6
+ 7cb32e193a55a95c74fc3bd56501b951b48b700f
-
+ https://github.com/dotnet/runtime
- 1cc0186c3e120ee4ed0494cf74fef0a3ef0118d6
+ 7cb32e193a55a95c74fc3bd56501b951b48b700f
-
+ https://github.com/dotnet/runtime
- 1cc0186c3e120ee4ed0494cf74fef0a3ef0118d6
+ 7cb32e193a55a95c74fc3bd56501b951b48b700f
-
+ https://github.com/dotnet/runtime
- 1cc0186c3e120ee4ed0494cf74fef0a3ef0118d6
+ 7cb32e193a55a95c74fc3bd56501b951b48b700f
-
+ https://github.com/dotnet/runtime
- 1cc0186c3e120ee4ed0494cf74fef0a3ef0118d6
+ 7cb32e193a55a95c74fc3bd56501b951b48b700f
-
+ https://github.com/dotnet/runtime
- 1cc0186c3e120ee4ed0494cf74fef0a3ef0118d6
+ 7cb32e193a55a95c74fc3bd56501b951b48b700f
-
+ https://github.com/dotnet/runtime
- 1cc0186c3e120ee4ed0494cf74fef0a3ef0118d6
+ 7cb32e193a55a95c74fc3bd56501b951b48b700f
-
+ https://github.com/dotnet/runtime
- 1cc0186c3e120ee4ed0494cf74fef0a3ef0118d6
+ 7cb32e193a55a95c74fc3bd56501b951b48b700f
-
- https://github.com/dotnet/runtime
- 1cc0186c3e120ee4ed0494cf74fef0a3ef0118d6
-
-
+ https://github.com/dotnet/xharness
- 9794254fa909ff5adc46326e9b54009793f61dcd
+ f20e52f7731da99588dd6b4f4bd60119f03220a3
-
+ https://github.com/dotnet/xharness
- 9794254fa909ff5adc46326e9b54009793f61dcd
+ f20e52f7731da99588dd6b4f4bd60119f03220a3
-
+ https://github.com/dotnet/xharness
- 9794254fa909ff5adc46326e9b54009793f61dcd
+ f20e52f7731da99588dd6b4f4bd60119f03220a3
-
+ https://github.com/dotnet/arcade
- 60ae233c3d77f11c5fdb53e570b64d503b13ba59
+ 34138f08e2c9c3c41a0ac8af583a57ea953e3821
-
+ https://dev.azure.com/dnceng/internal/_git/dotnet-optimization
- 1b39aa819a38d504f7f828c28ff173acca7f99e2
+ 8674aaa459d33551d419fece377f5512a1d93689
-
+ https://dev.azure.com/dnceng/internal/_git/dotnet-optimization
- 1b39aa819a38d504f7f828c28ff173acca7f99e2
+ 8674aaa459d33551d419fece377f5512a1d93689
-
+ https://dev.azure.com/dnceng/internal/_git/dotnet-optimization
- 1b39aa819a38d504f7f828c28ff173acca7f99e2
+ 8674aaa459d33551d419fece377f5512a1d93689
-
+ https://dev.azure.com/dnceng/internal/_git/dotnet-optimization
- 1b39aa819a38d504f7f828c28ff173acca7f99e2
+ 8674aaa459d33551d419fece377f5512a1d93689
-
+ https://github.com/dotnet/hotreload-utils
- b97425c36aae0f46a22f2893309771383bfa9b30
+ 2bf22c4e7b7036e87d29d2f2d76e99ddf57dab52
-
+ https://github.com/dotnet/runtime-assets
- 3ed40d4f2335532ee7b04f8449468c52b4e2a643
+ 928df39794e0f6677b50d6d8f690c52880c58d13
-
+ https://github.com/dotnet/roslyn
- c098e96e5470fccc9c8e9f32189f408d086c984e
+ 08a167c19e5e04742b0922bdb1ea8046e9364f4b
-
+ https://github.com/dotnet/roslyn
- c098e96e5470fccc9c8e9f32189f408d086c984e
+ 08a167c19e5e04742b0922bdb1ea8046e9364f4b
-
+ https://github.com/dotnet/roslyn
- c098e96e5470fccc9c8e9f32189f408d086c984e
+ 08a167c19e5e04742b0922bdb1ea8046e9364f4bhttps://github.com/dotnet/roslyn-analyzers
@@ -377,28 +373,28 @@
3211f48253bc18560156d90dc5e710d35f7d03fa
-
+ https://github.com/dotnet/roslyn
- c098e96e5470fccc9c8e9f32189f408d086c984e
+ 08a167c19e5e04742b0922bdb1ea8046e9364f4b
-
+ https://github.com/dotnet/sdk
- e028762ceb5ccaa05a8c93c29ce5bbcae926533a
+ 8d10b56a313f40d3575133805ce4cff8f41e0ac6
-
+ https://github.com/dotnet/sdk
- e028762ceb5ccaa05a8c93c29ce5bbcae926533a
+ 8d10b56a313f40d3575133805ce4cff8f41e0ac6
-
+ https://dev.azure.com/dnceng/internal/_git/dotnet-optimization
- 1b39aa819a38d504f7f828c28ff173acca7f99e2
+ 8674aaa459d33551d419fece377f5512a1d93689
-
+ https://dev.azure.com/dnceng/internal/_git/dotnet-optimization
- 1b39aa819a38d504f7f828c28ff173acca7f99e2
+ 8674aaa459d33551d419fece377f5512a1d93689
diff --git a/eng/Versions.props b/eng/Versions.props
index 2aa1a4fa295..dfa31e0d2e1 100644
--- a/eng/Versions.props
+++ b/eng/Versions.props
@@ -1,16 +1,16 @@
- 9.0.0
+ 10.0.0
- 9
+ 10009.0.100
- 8.0.7
+ 8.0.87.0.206.0.$([MSBuild]::Add($([System.Version]::Parse('$(PackageVersionNet8)').Build),25))
- rc
+ alpha1false
@@ -19,7 +19,8 @@
$(SdkBandVersion)$(WorkloadVersionSuffix)
- $(MajorVersion).$(MinorVersion).0.0
+
+ 9.0.0.0falsefalse
@@ -42,9 +43,9 @@
Any tools that contribute to the design-time experience should use the MicrosoftCodeAnalysisVersion_LatestVS property above to ensure
they do not break the local dev experience.
-->
- 4.12.0-2.24408.4
- 4.12.0-2.24408.4
- 4.12.0-2.24408.4
+ 4.12.0-2.24421.9
+ 4.12.0-2.24421.9
+ 4.12.0-2.24421.9
- 9.0.100-rc.1.24378.3
+ 10.0.100-alpha.2.24459.4
- 9.0.0-beta.24408.2
- 9.0.0-beta.24408.2
- 9.0.0-beta.24408.2
- 9.0.0-beta.24408.2
- 2.9.0-beta.24408.2
- 9.0.0-beta.24408.2
- 2.9.0-beta.24408.2
- 9.0.0-beta.24408.2
- 9.0.0-beta.24408.2
- 9.0.0-beta.24408.2
- 9.0.0-beta.24408.2
- 9.0.0-beta.24408.2
- 9.0.0-beta.24408.2
- 9.0.0-beta.24408.2
- 9.0.0-beta.24408.2
- 9.0.0-beta.24408.2
+ 10.0.0-beta.24467.1
+ 10.0.0-beta.24467.1
+ 10.0.0-beta.24467.1
+ 10.0.0-beta.24467.1
+ 2.9.0-beta.24467.1
+ 10.0.0-beta.24467.1
+ 2.9.0-beta.24467.1
+ 10.0.0-beta.24467.1
+ 10.0.0-beta.24467.1
+ 10.0.0-beta.24467.1
+ 10.0.0-beta.24467.1
+ 10.0.0-beta.24467.1
+ 10.0.0-beta.24467.1
+ 10.0.0-beta.24467.1
+ 10.0.0-beta.24467.1
+ 10.0.0-beta.24467.11.4.06.0.0-preview.1.102
- 9.0.0-rc.1.24403.1
+ 9.0.0-rc.1.24410.56.0.0
- 9.0.0-rc.1.24403.1
+ 9.0.0-rc.1.24410.56.0.01.1.1
@@ -119,52 +120,50 @@
8.0.05.0.04.5.5
- 9.0.0-rc.1.24403.1
- 9.0.0-rc.1.24403.1
+ 9.0.0-rc.1.24410.5
+ 9.0.0-rc.1.24410.56.0.05.0.05.0.05.0.07.0.0
- 9.0.0-rc.1.24403.1
+ 9.0.0-rc.1.24410.56.0.07.0.04.5.44.5.0
- 9.0.0-rc.1.24403.1
+ 9.0.0-rc.1.24410.58.0.08.0.08.0.08.0.0
- 9.0.0-beta.24407.1
- 9.0.0-beta.24407.1
- 9.0.0-beta.24407.1
- 9.0.0-beta.24407.1
- 9.0.0-beta.24407.1
- 9.0.0-beta.24407.1
- 9.0.0-beta.24407.1
- 9.0.0-beta.24407.1
- 9.0.0-beta.24407.1
- 9.0.0-beta.24407.1
- 9.0.0-beta.24407.1
- 9.0.0-beta.24407.1
- 9.0.0-beta.24407.1
- 9.0.0-beta.24407.1
+ 10.0.0-beta.24459.1
+ 10.0.0-beta.24459.1
+ 10.0.0-beta.24459.1
+ 10.0.0-beta.24459.1
+ 10.0.0-beta.24459.1
+ 10.0.0-beta.24459.1
+ 10.0.0-beta.24459.1
+ 10.0.0-beta.24459.1
+ 10.0.0-beta.24459.1
+ 10.0.0-beta.24459.1
+ 10.0.0-beta.24459.1
+ 10.0.0-beta.24459.1
+ 10.0.0-beta.24459.1
+ 10.0.0-beta.24459.1
- 1.0.0-prerelease.24376.4
- 1.0.0-prerelease.24376.4
- 1.0.0-prerelease.24376.4
- 1.0.0-prerelease.24376.4
- 1.0.0-prerelease.24376.4
- 1.0.0-prerelease.24376.4
+ 1.0.0-prerelease.24409.2
+ 1.0.0-prerelease.24409.2
+ 1.0.0-prerelease.24409.2
+ 1.0.0-prerelease.24409.2
+ 1.0.0-prerelease.24409.2
+ 1.0.0-prerelease.24409.22.0.017.10.0-beta1.24272.12.0.0-beta4.24324.3
-
- 9.0.0-preview.7.24327.23.1.72.1.02.0.3
@@ -181,10 +180,10 @@
1.4.017.4.0-preview-20220707-01
- 9.0.0-prerelease.24405.1
- 9.0.0-prerelease.24405.1
- 9.0.0-prerelease.24405.1
- 9.0.0-alpha.0.24405.1
+ 10.0.0-prerelease.24466.1
+ 10.0.0-prerelease.24466.1
+ 10.0.0-prerelease.24466.1
+ 9.0.0-alpha.0.24419.13.12.04.5.06.0.0
@@ -212,52 +211,51 @@
8.0.0-preview-20230918.1
- 0.11.5-alpha.24379.1
-
- 9.0.0-rc.1.24403.1
+ 0.11.5-alpha.24460.1
- 9.0.0-rc.1.24373.1
+ 10.0.0-alpha.1.24459.2
- 2.3.6
+ 2.4.39.0.0-alpha.1.24167.3
- 19.0.0-alpha.1.24370.2
- 19.0.0-alpha.1.24370.2
- 19.0.0-alpha.1.24370.2
- 19.0.0-alpha.1.24370.2
- 19.0.0-alpha.1.24370.2
- 19.0.0-alpha.1.24370.2
- 19.0.0-alpha.1.24370.2
- 19.0.0-alpha.1.24370.2
- 19.0.0-alpha.1.24370.2
- 19.0.0-alpha.1.24370.2
- 19.0.0-alpha.1.24370.2
- 19.0.0-alpha.1.24370.2
- 19.0.0-alpha.1.24370.2
- 19.0.0-alpha.1.24370.2
+ 19.0.0-alpha.1.24401.1
+ 19.0.0-alpha.1.24401.1
+ 19.0.0-alpha.1.24401.1
+ 19.0.0-alpha.1.24401.1
+ 19.0.0-alpha.1.24401.1
+ 19.0.0-alpha.1.24401.1
+ 19.0.0-alpha.1.24401.1
+ 19.0.0-alpha.1.24401.1
+ 19.0.0-alpha.1.24401.1
+ 19.0.0-alpha.1.24401.1
+ 19.0.0-alpha.1.24401.1
+ 19.0.0-alpha.1.24401.1
+ 19.0.0-alpha.1.24401.1
+ 19.0.0-alpha.1.24401.1
- 9.0.0-rc.1.24402.2
+ 10.0.0-alpha.1.24461.1$(MicrosoftNETWorkloadEmscriptenCurrentManifest90100TransportVersion)1.1.87-gba258badda1.0.0-v3.14.0.5722
- 19.0.0-alpha.1.24370.2
- 19.0.0-alpha.1.24370.2
- 19.0.0-alpha.1.24370.2
- 19.0.0-alpha.1.24370.2
- 19.0.0-alpha.1.24370.2
- 19.0.0-alpha.1.24370.2
- 19.0.0-alpha.1.24370.2
- 19.0.0-alpha.1.24370.2
+ 19.0.0-alpha.1.24401.1
+ 19.0.0-alpha.1.24401.1
+ 19.0.0-alpha.1.24401.1
+ 19.0.0-alpha.1.24401.1
+ 19.0.0-alpha.1.24401.1
+ 19.0.0-alpha.1.24401.1
+ 19.0.0-alpha.1.24401.1
+ 19.0.0-alpha.1.24401.13.1.71.0.406601
- $(MicrosoftDotNetApiCompatTaskVersion)
+
+ 9.0.100-rc.1.24452.129.0.0-alpha.1.24175.1$(MicrosoftNETRuntimeEmscriptenVersion)$(runtimewinx64MicrosoftNETCoreRuntimeWasmNodeTransportPackageVersion)
diff --git a/eng/build.ps1 b/eng/build.ps1
index 4a31c90180b..2356e83f95b 100644
--- a/eng/build.ps1
+++ b/eng/build.ps1
@@ -329,7 +329,7 @@ foreach ($argument in $PSBoundParameters.Keys)
}
if ($env:TreatWarningsAsErrors -eq 'false') {
- $arguments += " -warnAsError 0"
+ $arguments += " -warnAsError `$false"
}
# disable terminal logger for now: https://github.com/dotnet/runtime/issues/97211
diff --git a/eng/build.sh b/eng/build.sh
index df632614667..99c9228a1fa 100755
--- a/eng/build.sh
+++ b/eng/build.sh
@@ -550,7 +550,7 @@ if [[ "$os" == "wasi" ]]; then
fi
if [[ "${TreatWarningsAsErrors:-}" == "false" ]]; then
- arguments="$arguments -warnAsError 0"
+ arguments="$arguments -warnAsError false"
fi
# disable terminal logger for now: https://github.com/dotnet/runtime/issues/97211
diff --git a/eng/common/SetupNugetSources.ps1 b/eng/common/SetupNugetSources.ps1
index 2b0a5c9e665..5db4ad71ee2 100644
--- a/eng/common/SetupNugetSources.ps1
+++ b/eng/common/SetupNugetSources.ps1
@@ -157,7 +157,7 @@ if ($dotnet31Source -ne $null) {
AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v2" -Creds $creds -Username $userName -pwd $Password
}
-$dotnetVersions = @('5','6','7','8')
+$dotnetVersions = @('5','6','7','8','9')
foreach ($dotnetVersion in $dotnetVersions) {
$feedPrefix = "dotnet" + $dotnetVersion;
diff --git a/eng/common/SetupNugetSources.sh b/eng/common/SetupNugetSources.sh
index b493479a1da..4604b61b032 100644
--- a/eng/common/SetupNugetSources.sh
+++ b/eng/common/SetupNugetSources.sh
@@ -99,7 +99,7 @@ if [ "$?" == "0" ]; then
PackageSources+=('dotnet3.1-internal-transport')
fi
-DotNetVersions=('5' '6' '7' '8')
+DotNetVersions=('5' '6' '7' '8' '9')
for DotNetVersion in ${DotNetVersions[@]} ; do
FeedPrefix="dotnet${DotNetVersion}";
diff --git a/eng/common/core-templates/job/job.yml b/eng/common/core-templates/job/job.yml
index c732bee9f4a..ba53ebfbd51 100644
--- a/eng/common/core-templates/job/job.yml
+++ b/eng/common/core-templates/job/job.yml
@@ -33,11 +33,6 @@ parameters:
artifactPublishSteps: []
runAsPublic: false
-# Sbom related params
- enableSbom: true
- PackageVersion: 9.0.0
- BuildDropPath: '$(Build.SourcesDirectory)/artifacts'
-
# 1es specific parameters
is1ESPipeline: ''
diff --git a/eng/common/core-templates/job/source-index-stage1.yml b/eng/common/core-templates/job/source-index-stage1.yml
index 205fb5b3a39..30530359a5d 100644
--- a/eng/common/core-templates/job/source-index-stage1.yml
+++ b/eng/common/core-templates/job/source-index-stage1.yml
@@ -1,8 +1,5 @@
parameters:
runAsPublic: false
- sourceIndexUploadPackageVersion: 2.0.0-20240522.1
- sourceIndexProcessBinlogPackageVersion: 1.0.1-20240522.1
- sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json
sourceIndexBuildCommand: powershell -NoLogo -NoProfile -ExecutionPolicy Bypass -Command "eng/common/build.ps1 -restore -build -binarylog -ci"
preSteps: []
binlogPath: artifacts/log/Debug/Build.binlog
@@ -16,12 +13,6 @@ jobs:
dependsOn: ${{ parameters.dependsOn }}
condition: ${{ parameters.condition }}
variables:
- - name: SourceIndexUploadPackageVersion
- value: ${{ parameters.sourceIndexUploadPackageVersion }}
- - name: SourceIndexProcessBinlogPackageVersion
- value: ${{ parameters.sourceIndexProcessBinlogPackageVersion }}
- - name: SourceIndexPackageSource
- value: ${{ parameters.sourceIndexPackageSource }}
- name: BinlogPath
value: ${{ parameters.binlogPath }}
- template: /eng/common/core-templates/variables/pool-providers.yml
@@ -34,12 +25,10 @@ jobs:
pool:
${{ if eq(variables['System.TeamProject'], 'public') }}:
name: $(DncEngPublicBuildPool)
- image: 1es-windows-2022-open
- os: windows
+ image: windows.vs2022.amd64.open
${{ if eq(variables['System.TeamProject'], 'internal') }}:
name: $(DncEngInternalBuildPool)
- image: 1es-windows-2022
- os: windows
+ image: windows.vs2022.amd64
steps:
- ${{ if eq(parameters.is1ESPipeline, '') }}:
@@ -47,35 +36,9 @@ jobs:
- ${{ each preStep in parameters.preSteps }}:
- ${{ preStep }}
-
- - task: UseDotNet@2
- displayName: Use .NET 8 SDK
- inputs:
- packageType: sdk
- version: 8.0.x
- installationPath: $(Agent.TempDirectory)/dotnet
- workingDirectory: $(Agent.TempDirectory)
-
- - script: |
- $(Agent.TempDirectory)/dotnet/dotnet tool install BinLogToSln --version $(sourceIndexProcessBinlogPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools
- $(Agent.TempDirectory)/dotnet/dotnet tool install UploadIndexStage1 --version $(sourceIndexUploadPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools
- displayName: Download Tools
- # Set working directory to temp directory so 'dotnet' doesn't try to use global.json and use the repo's sdk.
- workingDirectory: $(Agent.TempDirectory)
-
- script: ${{ parameters.sourceIndexBuildCommand }}
displayName: Build Repository
- - script: $(Agent.TempDirectory)/.source-index/tools/BinLogToSln -i $(BinlogPath) -r $(Build.SourcesDirectory) -n $(Build.Repository.Name) -o .source-index/stage1output
- displayName: Process Binlog into indexable sln
-
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - task: AzureCLI@2
- displayName: Log in to Azure and upload stage1 artifacts to source index
- inputs:
- azureSubscription: 'SourceDotNet Stage1 Publish'
- addSpnToEnvironment: true
- scriptType: 'ps'
- scriptLocation: 'inlineScript'
- inlineScript: |
- $(Agent.TempDirectory)/.source-index/tools/UploadIndexStage1 -i .source-index/stage1output -n $(Build.Repository.Name) -s netsourceindexstage1 -b stage1
+ - template: /eng/common/core-templates/steps/source-index-stage1-publish.yml
+ parameters:
+ binLogPath: ${{ parameters.binLogPath }}
\ No newline at end of file
diff --git a/eng/common/core-templates/steps/publish-logs.yml b/eng/common/core-templates/steps/publish-logs.yml
index 80788c52319..de24d0087c5 100644
--- a/eng/common/core-templates/steps/publish-logs.yml
+++ b/eng/common/core-templates/steps/publish-logs.yml
@@ -34,7 +34,9 @@ steps:
'$(akams-client-id)'
'$(microsoft-symbol-server-pat)'
'$(symweb-symbol-server-pat)'
+ '$(dnceng-symbol-server-pat)'
'$(dn-bot-all-orgs-build-rw-code-rw)'
+ '$(System.AccessToken)'
${{parameters.CustomSensitiveDataList}}
continueOnError: true
condition: always()
@@ -45,6 +47,7 @@ steps:
SourceFolder: '$(Build.SourcesDirectory)/PostBuildLogs'
Contents: '**'
TargetFolder: '$(Build.ArtifactStagingDirectory)/PostBuildLogs'
+ condition: always()
- template: /eng/common/core-templates/steps/publish-build-artifacts.yml
parameters:
diff --git a/eng/common/core-templates/steps/source-index-stage1-publish.yml b/eng/common/core-templates/steps/source-index-stage1-publish.yml
new file mode 100644
index 00000000000..473a22c4719
--- /dev/null
+++ b/eng/common/core-templates/steps/source-index-stage1-publish.yml
@@ -0,0 +1,35 @@
+parameters:
+ sourceIndexUploadPackageVersion: 2.0.0-20240522.1
+ sourceIndexProcessBinlogPackageVersion: 1.0.1-20240522.1
+ sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json
+ binlogPath: artifacts/log/Debug/Build.binlog
+
+steps:
+- task: UseDotNet@2
+ displayName: "Source Index: Use .NET 8 SDK"
+ inputs:
+ packageType: sdk
+ version: 8.0.x
+ installationPath: $(Agent.TempDirectory)/dotnet
+ workingDirectory: $(Agent.TempDirectory)
+
+- script: |
+ $(Agent.TempDirectory)/dotnet/dotnet tool install BinLogToSln --version ${{parameters.sourceIndexProcessBinlogPackageVersion}} --add-source ${{parameters.SourceIndexPackageSource}} --tool-path $(Agent.TempDirectory)/.source-index/tools
+ $(Agent.TempDirectory)/dotnet/dotnet tool install UploadIndexStage1 --version ${{parameters.sourceIndexUploadPackageVersion}} --add-source ${{parameters.SourceIndexPackageSource}} --tool-path $(Agent.TempDirectory)/.source-index/tools
+ displayName: "Source Index: Download netsourceindex Tools"
+ # Set working directory to temp directory so 'dotnet' doesn't try to use global.json and use the repo's sdk.
+ workingDirectory: $(Agent.TempDirectory)
+
+- script: $(Agent.TempDirectory)/.source-index/tools/BinLogToSln -i ${{parameters.BinlogPath}} -r $(Build.SourcesDirectory) -n $(Build.Repository.Name) -o .source-index/stage1output
+ displayName: "Source Index: Process Binlog into indexable sln"
+
+- ${{ if and(ne(parameters.runAsPublic, 'true'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - task: AzureCLI@2
+ displayName: "Source Index: Upload Source Index stage1 artifacts to Azure"
+ inputs:
+ azureSubscription: 'SourceDotNet Stage1 Publish'
+ addSpnToEnvironment: true
+ scriptType: 'ps'
+ scriptLocation: 'inlineScript'
+ inlineScript: |
+ $(Agent.TempDirectory)/.source-index/tools/UploadIndexStage1 -i .source-index/stage1output -n $(Build.Repository.Name) -s netsourceindexstage1 -b stage1
diff --git a/eng/common/internal/Tools.csproj b/eng/common/internal/Tools.csproj
index e925952d566..32f79dfb340 100644
--- a/eng/common/internal/Tools.csproj
+++ b/eng/common/internal/Tools.csproj
@@ -4,6 +4,7 @@
net472false
+ false
diff --git a/eng/common/template-guidance.md b/eng/common/template-guidance.md
index 5ef6c30ba92..98bbc1ded0b 100644
--- a/eng/common/template-guidance.md
+++ b/eng/common/template-guidance.md
@@ -57,7 +57,7 @@ extends:
Note: Multiple outputs are ONLY applicable to 1ES PT publishing (only usable when referencing `templates-official`).
-# Development notes
+## Development notes
**Folder / file structure**
diff --git a/eng/common/templates-official/job/job.yml b/eng/common/templates-official/job/job.yml
index 0c2928d5c79..3d16b41c78c 100644
--- a/eng/common/templates-official/job/job.yml
+++ b/eng/common/templates-official/job/job.yml
@@ -1,8 +1,22 @@
+parameters:
+# Sbom related params
+ enableSbom: true
+ PackageVersion: 9.0.0
+ BuildDropPath: '$(Build.SourcesDirectory)/artifacts'
+
jobs:
- template: /eng/common/core-templates/job/job.yml
parameters:
is1ESPipeline: true
+ componentGovernanceSteps:
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.enableSbom, 'true')) }}:
+ - template: /eng/common/templates/steps/generate-sbom.yml
+ parameters:
+ PackageVersion: ${{ parameters.packageVersion }}
+ BuildDropPath: ${{ parameters.buildDropPath }}
+ publishArtifacts: false
+
# publish artifacts
# for 1ES managed templates, use the templateContext.output to handle multiple outputs.
templateContext:
diff --git a/eng/common/templates-official/steps/source-index-stage1-publish.yml b/eng/common/templates-official/steps/source-index-stage1-publish.yml
new file mode 100644
index 00000000000..9b8b80942b5
--- /dev/null
+++ b/eng/common/templates-official/steps/source-index-stage1-publish.yml
@@ -0,0 +1,7 @@
+steps:
+- template: /eng/common/core-templates/steps/source-index-stage1-publish.yml
+ parameters:
+ is1ESPipeline: true
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates/job/job.yml b/eng/common/templates/job/job.yml
index 8da477dd69f..07d317bf8f9 100644
--- a/eng/common/templates/job/job.yml
+++ b/eng/common/templates/job/job.yml
@@ -19,71 +19,63 @@ jobs:
steps:
- ${{ each step in parameters.steps }}:
- ${{ step }}
-
+
componentGovernanceSteps:
- - template: /eng/common/templates/steps/component-governance.yml
- parameters:
- ${{ if eq(parameters.disableComponentGovernance, '') }}:
- ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.runAsPublic, 'false'), or(startsWith(variables['Build.SourceBranch'], 'refs/heads/release/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/dotnet/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/microsoft/'), eq(variables['Build.SourceBranch'], 'refs/heads/main'))) }}:
- disableComponentGovernance: false
- ${{ else }}:
- disableComponentGovernance: true
+ - template: /eng/common/templates/steps/component-governance.yml
+ parameters:
+ ${{ if eq(parameters.disableComponentGovernance, '') }}:
+ ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.runAsPublic, 'false'), or(startsWith(variables['Build.SourceBranch'], 'refs/heads/release/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/dotnet/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/microsoft/'), eq(variables['Build.SourceBranch'], 'refs/heads/main'))) }}:
+ disableComponentGovernance: false
${{ else }}:
- disableComponentGovernance: ${{ parameters.disableComponentGovernance }}
- componentGovernanceIgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
-
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.enableSbom, 'true')) }}:
- - template: /eng/common/templates/steps/generate-sbom.yml
- parameters:
- PackageVersion: ${{ parameters.packageVersion }}
- BuildDropPath: ${{ parameters.buildDropPath }}
- publishArtifacts: false
-
+ disableComponentGovernance: true
+ ${{ else }}:
+ disableComponentGovernance: ${{ parameters.disableComponentGovernance }}
+ componentGovernanceIgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
artifactPublishSteps:
- - ${{ if ne(parameters.artifacts.publish, '') }}:
- - ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}:
- - template: /eng/common/core-templates/steps/publish-build-artifacts.yml
- parameters:
- is1ESPipeline: false
- args:
- displayName: Publish pipeline artifacts
- pathToPublish: '$(Build.ArtifactStagingDirectory)/artifacts'
- publishLocation: Container
- artifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }}
- continueOnError: true
- condition: always()
- - ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}:
- - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
- parameters:
- is1ESPipeline: false
- args:
- targetPath: '$(Build.ArtifactStagingDirectory)/artifacts/log'
- artifactName: ${{ coalesce(parameters.artifacts.publish.logs.name, 'Logs_Build_$(Agent.Os)_$(_BuildConfig)') }}
- displayName: 'Publish logs'
- continueOnError: true
- condition: always()
- sbomEnabled: false # we don't need SBOM for logs
-
- - ${{ if ne(parameters.enablePublishBuildArtifacts, 'false') }}:
+ - ${{ if ne(parameters.artifacts.publish, '') }}:
+ - ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}:
- template: /eng/common/core-templates/steps/publish-build-artifacts.yml
parameters:
is1ESPipeline: false
args:
- displayName: Publish Logs
- pathToPublish: '$(Build.ArtifactStagingDirectory)/artifacts/log/$(_BuildConfig)'
+ displayName: Publish pipeline artifacts
+ pathToPublish: '$(Build.ArtifactStagingDirectory)/artifacts'
publishLocation: Container
- artifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)' ) }}
+ artifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }}
continueOnError: true
condition: always()
-
- - ${{ if eq(parameters.enableBuildRetry, 'true') }}:
+ - ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}:
- template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
parameters:
is1ESPipeline: false
args:
- targetPath: '$(Build.SourcesDirectory)\eng\common\BuildConfiguration'
- artifactName: 'BuildConfiguration'
- displayName: 'Publish build retry configuration'
+ targetPath: '$(Build.ArtifactStagingDirectory)/artifacts/log'
+ artifactName: ${{ coalesce(parameters.artifacts.publish.logs.name, 'Logs_Build_$(Agent.Os)_$(_BuildConfig)') }}
+ displayName: 'Publish logs'
continueOnError: true
- sbomEnabled: false # we don't need SBOM for BuildConfiguration
+ condition: always()
+ sbomEnabled: false # we don't need SBOM for logs
+
+ - ${{ if ne(parameters.enablePublishBuildArtifacts, 'false') }}:
+ - template: /eng/common/core-templates/steps/publish-build-artifacts.yml
+ parameters:
+ is1ESPipeline: false
+ args:
+ displayName: Publish Logs
+ pathToPublish: '$(Build.ArtifactStagingDirectory)/artifacts/log/$(_BuildConfig)'
+ publishLocation: Container
+ artifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)' ) }}
+ continueOnError: true
+ condition: always()
+
+ - ${{ if eq(parameters.enableBuildRetry, 'true') }}:
+ - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
+ parameters:
+ is1ESPipeline: false
+ args:
+ targetPath: '$(Build.SourcesDirectory)\eng\common\BuildConfiguration'
+ artifactName: 'BuildConfiguration'
+ displayName: 'Publish build retry configuration'
+ continueOnError: true
+ sbomEnabled: false # we don't need SBOM for BuildConfiguration
diff --git a/eng/common/templates/steps/source-index-stage1-publish.yml b/eng/common/templates/steps/source-index-stage1-publish.yml
new file mode 100644
index 00000000000..182cec33a7b
--- /dev/null
+++ b/eng/common/templates/steps/source-index-stage1-publish.yml
@@ -0,0 +1,7 @@
+steps:
+- template: /eng/common/core-templates/steps/source-index-stage1-publish.yml
+ parameters:
+ is1ESPipeline: false
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/generators.targets b/eng/generators.targets
index b5ca3ca89ee..b5c8b54a107 100644
--- a/eng/generators.targets
+++ b/eng/generators.targets
@@ -1,19 +1,10 @@
-
-
- true
-
-
-
-
+
+
+
+
+
+ '$(MSBuildProjectExtension)' == '.csproj' and
+ (
+ '$(DisableImplicitFrameworkReferences)' == 'true' and
+ '@(Reference->AnyHaveMetadataValue('Identity', 'System.Runtime.InteropServices'))' == 'true'
+ )" />
+ @(EnabledGenerators->AnyHaveMetadataValue('Identity', 'DownlevelLibraryImportGenerator'))">
@@ -57,6 +55,11 @@
ReferenceOutputAssembly="false"
OutputItemType="Analyzer"
SetConfiguration="Configuration=$(LibrariesConfiguration)" />
+
+ DependsOnTargets="GetTargetPathWithTargetPlatformMoniker"
+ BeforeTargets="GetTargetPath">
diff --git a/eng/install-native-dependencies.sh b/eng/install-native-dependencies.sh
index 41895e0b925..57ab7848f8e 100755
--- a/eng/install-native-dependencies.sh
+++ b/eng/install-native-dependencies.sh
@@ -23,14 +23,14 @@ case "$os" in
if [ "$ID" = "debian" ] || [ "$ID_LIKE" = "debian" ]; then
apt update
- apt install -y build-essential gettext locales cmake llvm clang lldb liblldb-dev libunwind8-dev libicu-dev liblttng-ust-dev \
+ apt install -y build-essential gettext locales cmake llvm clang lld lldb liblldb-dev libunwind8-dev libicu-dev liblttng-ust-dev \
libssl-dev libkrb5-dev zlib1g-dev
localedef -i en_US -c -f UTF-8 -A /usr/share/locale/locale.alias en_US.UTF-8
elif [ "$ID" = "fedora" ]; then
dnf install -y cmake llvm lld lldb clang python curl libicu-devel openssl-devel krb5-devel zlib-devel lttng-ust-devel
elif [ "$ID" = "alpine" ]; then
- apk add build-base cmake bash curl clang llvm-dev krb5-dev lttng-ust-dev icu-dev zlib-dev openssl-dev
+ apk add build-base cmake bash curl clang llvm-dev lld lldb krb5-dev lttng-ust-dev icu-dev zlib-dev openssl-dev
else
echo "Unsupported distro. distro: $ID"
exit 1
diff --git a/eng/intellisense.targets b/eng/intellisense.targets
index 7e1cb0054d3..53614511d96 100644
--- a/eng/intellisense.targets
+++ b/eng/intellisense.targets
@@ -11,7 +11,7 @@
$(IntellisensePackageXmlFilePathFromNetFolder)$(IntellisensePackageXmlFilePathFromDotNetPlatExtFolder)
- $(IntermediateOutputPath)$(TargetName).intellisense-package.xml
+ $([MSBuild]::NormalizePath('$(IntermediateOutputPath)', 'intellisense-package', '$(TargetName).xml'))$(NoWarn);CS1591
@@ -66,8 +66,8 @@
+ DependsOnTargets="ChangeDocumentationFileForPackaging;GetTargetPathWithTargetPlatformMoniker"
+ BeforeTargets="GetTargetPath">
diff --git a/eng/liveBuilds.targets b/eng/liveBuilds.targets
index cb216d6c3ad..3446081a627 100644
--- a/eng/liveBuilds.targets
+++ b/eng/liveBuilds.targets
@@ -64,6 +64,15 @@
$([MSBuild]::NormalizePath('$(CoreCLRArtifactsPath)', 'corehost', 'singlefilehost$(ExeSuffix)'))
+
+
+ true
+
+
+
+
+
+
diff --git a/eng/native/configurecompiler.cmake b/eng/native/configurecompiler.cmake
index d6ab391eb62..ffc3abd2871 100644
--- a/eng/native/configurecompiler.cmake
+++ b/eng/native/configurecompiler.cmake
@@ -304,7 +304,7 @@ elseif(CLR_CMAKE_HOST_SUNOS)
add_compile_options($<$:-Wa,--noexecstack>)
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fstack-protector")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fstack-protector")
- add_definitions(-D__EXTENSIONS__ -D_XPG4_2 -D_POSIX_PTHREAD_SEMANTICS)
+ add_definitions(-D__EXTENSIONS__ -D_XPG4_2 -D_POSIX_PTHREAD_SEMANTICS -D_REENTRANT)
elseif(CLR_CMAKE_HOST_OSX AND NOT CLR_CMAKE_HOST_MACCATALYST AND NOT CLR_CMAKE_HOST_IOS AND NOT CLR_CMAKE_HOST_TVOS)
add_definitions(-D_XOPEN_SOURCE)
diff --git a/eng/packaging.targets b/eng/packaging.targets
index 207821cf95c..99912459fe0 100644
--- a/eng/packaging.targets
+++ b/eng/packaging.targets
@@ -22,6 +22,7 @@
$(MSBuildThisFileDirectory)useSharedDesignerContext.txt
+ truePACKAGE.mdPACKAGE.md$(BeforePack);ValidatePackageReadmeExists
@@ -202,6 +203,21 @@
+
+
+
+
+
diff --git a/eng/pipelines/common/global-build-job.yml b/eng/pipelines/common/global-build-job.yml
index b068861190c..e370a42f47d 100644
--- a/eng/pipelines/common/global-build-job.yml
+++ b/eng/pipelines/common/global-build-job.yml
@@ -35,6 +35,7 @@ parameters:
preBuildSteps: []
templatePath: 'templates'
templateContext: ''
+ disableComponentGovernance: ''
jobs:
- template: /eng/common/${{ parameters.templatePath }}/job/job.yml
diff --git a/eng/pipelines/common/perf-variables.yml b/eng/pipelines/common/perf-variables.yml
deleted file mode 100644
index 9b14ab5067a..00000000000
--- a/eng/pipelines/common/perf-variables.yml
+++ /dev/null
@@ -1,16 +0,0 @@
-variables:
- - name: _wasmCollectHelixLogsScript
- value: >-
- test -d "$HELIX_WORKITEM_UPLOAD_ROOT" && (
- export _PERF_DIR=$HELIX_WORKITEM_ROOT/performance;
- mkdir -p $HELIX_WORKITEM_UPLOAD_ROOT/log;
- find $_PERF_DIR -name '*.binlog' | xargs -I{} cp {} $HELIX_WORKITEM_UPLOAD_ROOT/log;
- test "$_commandExitCode" -eq 0 || (
- mkdir -p $HELIX_WORKITEM_UPLOAD_ROOT/log/MicroBenchmarks/obj;
- mkdir -p $HELIX_WORKITEM_UPLOAD_ROOT/log/MicroBenchmarks/bin;
- mkdir -p $HELIX_WORKITEM_UPLOAD_ROOT/log/BenchmarkDotNet.Autogenerated/obj;
- mkdir -p $HELIX_WORKITEM_UPLOAD_ROOT/log/for-running;
- cp -R $_PERF_DIR/artifacts/obj/MicroBenchmarks $HELIX_WORKITEM_UPLOAD_ROOT/log/MicroBenchmarks/obj;
- cp -R $_PERF_DIR/artifacts/bin/MicroBenchmarks $HELIX_WORKITEM_UPLOAD_ROOT/log/MicroBenchmarks/bin;
- cp -R $_PERF_DIR/artifacts/obj/BenchmarkDotNet.Autogenerated $HELIX_WORKITEM_UPLOAD_ROOT/log/BenchmarkDotNet.Autogenerated/obj;
- cp -R $_PERF_DIR/artifacts/bin/for-running $HELIX_WORKITEM_UPLOAD_ROOT/log/for-running))
diff --git a/eng/pipelines/common/templates/pipeline-with-resources.yml b/eng/pipelines/common/templates/pipeline-with-resources.yml
index cf2b9f53e52..90851b8d725 100644
--- a/eng/pipelines/common/templates/pipeline-with-resources.yml
+++ b/eng/pipelines/common/templates/pipeline-with-resources.yml
@@ -17,7 +17,7 @@ extends:
containers:
linux_arm:
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-cross-arm-net9.0
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-net9.0-cross-arm
env:
ROOTFS_DIR: /crossrootfs/arm
@@ -27,44 +27,44 @@ extends:
ROOTFS_DIR: /crossrootfs/armv6
linux_arm64:
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-cross-arm64-net9.0
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-net9.0-cross-arm64
env:
ROOTFS_DIR: /crossrootfs/arm64
linux_musl_x64:
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-cross-amd64-alpine-net9.0
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-net9.0-cross-amd64-alpine
env:
ROOTFS_DIR: /crossrootfs/x64
linux_musl_arm:
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-cross-arm-alpine-net9.0
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-net9.0-cross-arm-alpine
env:
ROOTFS_DIR: /crossrootfs/arm
linux_musl_arm64:
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-cross-arm64-alpine-net9.0
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-net9.0-cross-arm64-alpine
env:
ROOTFS_DIR: /crossrootfs/arm64
# This container contains all required toolsets to build for Android and for Linux with bionic libc.
android:
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-cross-android-amd64-net9.0
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-net9.0-cross-android-amd64
# This container contains all required toolsets to build for Android and for Linux with bionic libc and a special layout of OpenSSL.
linux_bionic:
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-android-openssl-net9.0
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-net9.0-android-openssl
# This container contains all required toolsets to build for Android as well as tooling to build docker images.
android_docker:
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-android-docker-net9.0
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-net9.0-android-docker
linux_x64:
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-cross-amd64-net9.0
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-net9.0-cross-amd64
env:
ROOTFS_DIR: /crossrootfs/x64
linux_x86:
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-cross-x86-net9.0
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-net9.0-cross-x86
env:
ROOTFS_DIR: /crossrootfs/x86
@@ -75,7 +75,7 @@ extends:
image: mcr.microsoft.com/dotnet-buildtools/prereqs:alpine-3.19-WithNode
linux_x64_sanitizer:
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-cross-amd64-net9.0-sanitizer
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-net9.0-cross-amd64-sanitizer
env:
ROOTFS_DIR: /crossrootfs/x64
@@ -88,17 +88,17 @@ extends:
image: mcr.microsoft.com/dotnet-buildtools/prereqs:almalinux-8-source-build
linux_s390x:
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-cross-s390x-net9.0
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-net9.0-cross-s390x
env:
ROOTFS_DIR: /crossrootfs/s390x
linux_ppc64le:
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-cross-ppc64le-net9.0
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-net9.0-cross-ppc64le
env:
ROOTFS_DIR: /crossrootfs/ppc64le
linux_riscv64:
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-cross-riscv64-net9.0
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-net9.0-cross-riscv64
env:
ROOTFS_DIR: /crossrootfs/riscv64
@@ -109,17 +109,17 @@ extends:
image: mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8
browser_wasm:
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-webassembly-amd64-net9.0
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-net9.0-webassembly-amd64
env:
ROOTFS_DIR: /crossrootfs/x64
wasi_wasm:
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-webassembly-amd64-net9.0
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-net9.0-webassembly-amd64
env:
ROOTFS_DIR: /crossrootfs/x64
freebsd_x64:
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-cross-freebsd-13-net9.0
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-net9.0-cross-freebsd-13
env:
ROOTFS_DIR: /crossrootfs/x64
@@ -132,4 +132,4 @@ extends:
image: mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-22.04-debpkg
rpmpkg:
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-fpm-net9.0
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-net9.0-fpm
diff --git a/eng/pipelines/common/templates/runtimes/build-runtime-tests-and-send-to-helix.yml b/eng/pipelines/common/templates/runtimes/build-runtime-tests-and-send-to-helix.yml
index b0a2043bbd5..2ac52ca77eb 100644
--- a/eng/pipelines/common/templates/runtimes/build-runtime-tests-and-send-to-helix.yml
+++ b/eng/pipelines/common/templates/runtimes/build-runtime-tests-and-send-to-helix.yml
@@ -19,6 +19,9 @@ parameters:
variables: {}
pool: ''
dependsOn: []
+ compileOnHelix: false
+ interpreter: false
+ buildAllTestsAsStandalone: false
#arcade-specific parameters
condition: always()
continueOnError: false
@@ -27,8 +30,6 @@ parameters:
enableMicrobuild: ''
gatherAssetManifests: false
shouldContinueOnError: false
- compileOnHelix: false
- interpreter: false
steps:
- template: /eng/pipelines/common/templates/runtimes/build-runtime-tests.yml
@@ -38,6 +39,7 @@ steps:
archType: ${{ parameters.archType }}
buildConfig: ${{ parameters.buildConfig }}
testBuildArgs: ${{ parameters.testBuildArgs }}
+ buildAllTestsAsStandalone: ${{ parameters.buildAllTestsAsStandalone }}
# Build a Mono AOT cross-compiler for non-amd64 targets (in this case, just arm64)
- ${{ if and(eq(parameters.runtimeFlavor, 'mono'), in(parameters.runtimeVariant, 'llvmaot', 'llvmfullaot', 'minifullaot')) }}:
- ${{ if eq(parameters.archType, 'arm64') }}:
diff --git a/eng/pipelines/common/templates/runtimes/build-runtime-tests.yml b/eng/pipelines/common/templates/runtimes/build-runtime-tests.yml
index f9ef84a49c1..a822ccf28fd 100644
--- a/eng/pipelines/common/templates/runtimes/build-runtime-tests.yml
+++ b/eng/pipelines/common/templates/runtimes/build-runtime-tests.yml
@@ -4,6 +4,7 @@ parameters:
archType: ''
buildConfig: ''
testBuildArgs: ''
+ buildAllTestsAsStandalone: false
#arcade-specific parameters
condition: always()
continueOnError: false
@@ -18,6 +19,13 @@ steps:
- ${{ if eq(parameters.osGroup, 'windows') }}:
- script: $(Build.SourcesDirectory)/src/tests/build$(scriptExt) $(crossArg) ci ${{ parameters.archType }} $(buildConfigUpper) $(_nativeSanitizersArg) $(priorityArg) $(runtimeFlavorArgs) ${{ parameters.testBuildArgs }} $(runtimeVariantArg) $(librariesConfigurationArg)
displayName: Build Tests
+ env:
+ ${{ if eq(parameters.buildAllTestsAsStandalone, true) }}:
+ BuildAllTestsAsStandalone: true
+
- ${{ if ne(parameters.osGroup, 'windows') }}:
- script: $(Build.SourcesDirectory)/src/tests/build$(scriptExt) $(crossArg) ci os ${{ parameters.osGroup }} ${{ parameters.archType }} $(buildConfigUpper) $(_nativeSanitizersArg) $(priorityArg) $(runtimeFlavorArgs) ${{ parameters.testBuildArgs }} $(runtimeVariantArg) $(librariesConfigurationArg)
displayName: Build Tests
+ env:
+ ${{ if eq(parameters.buildAllTestsAsStandalone, true) }}:
+ BuildAllTestsAsStandalone: true
diff --git a/eng/pipelines/common/templates/runtimes/build-test-job.yml b/eng/pipelines/common/templates/runtimes/build-test-job.yml
index bbc9f854801..4af04c54612 100644
--- a/eng/pipelines/common/templates/runtimes/build-test-job.yml
+++ b/eng/pipelines/common/templates/runtimes/build-test-job.yml
@@ -78,13 +78,13 @@ jobs:
- name: runtimeFlavorArgs
value: '-nativeaot'
- - name: testTreeFilterArg
+ - name: testFilterArg
value: ''
# Only build GCSimulator tests when the gc-simulator group is specified.
- ${{ if eq(parameters.testGroup, 'gc-simulator') }}:
- - name: testTreeFilterArg
- value: 'tree GC/Scenarios/GCSimulator'
+ - name: testFilterArg
+ value: 'test GC/Scenarios/GC-simulator.csproj'
- template: /eng/pipelines/common/templates/runtimes/native-test-assets-variables.yml
parameters:
@@ -110,7 +110,7 @@ jobs:
displayName: Disk Usage before Build
# Build managed test components
- - script: $(Build.SourcesDirectory)/src/tests/build$(scriptExt) $(logRootNameArg)Managed allTargets skipnative skipgeneratelayout skiptestwrappers $(buildConfig) $(archType) $(runtimeFlavorArgs) $(crossArg) $(priorityArg) $(testTreeFilterArg) ci /p:TargetOS=AnyOS
+ - script: $(Build.SourcesDirectory)/src/tests/build$(scriptExt) $(logRootNameArg)Managed allTargets skipnative skipgeneratelayout skiptestwrappers $(buildConfig) $(archType) $(runtimeFlavorArgs) $(crossArg) $(priorityArg) $(testFilterArg) ci /p:TargetOS=AnyOS
displayName: Build managed test components
- ${{ if in(parameters.osGroup, 'osx', 'ios', 'tvos') }}:
diff --git a/eng/pipelines/common/templates/runtimes/run-test-job.yml b/eng/pipelines/common/templates/runtimes/run-test-job.yml
index 1b1bf6a0481..d31c473908d 100644
--- a/eng/pipelines/common/templates/runtimes/run-test-job.yml
+++ b/eng/pipelines/common/templates/runtimes/run-test-job.yml
@@ -119,13 +119,13 @@ jobs:
- name: LogNamePrefix
value: TestRunLogs_R2R_CG2_HotColdSplitting
- - name: testTreeFilterArg
+ - name: testFilterArg
value: ''
# Only build GCSimulator tests when the gc-simulator group is specified.
- ${{ if eq(parameters.testGroup, 'gc-simulator') }}:
- - name: testTreeFilterArg
- value: 'tree GC/Scenarios/GCSimulator'
+ - name: testFilterArg
+ value: 'test GC/Scenarios/GC-simulator.csproj'
- template: /eng/pipelines/common/templates/runtimes/native-test-assets-variables.yml
parameters:
@@ -232,7 +232,7 @@ jobs:
# and directly unzip them there after download). Unfortunately the logic to copy
# the native artifacts to the final test folders is dependent on availability of the
# managed test artifacts. This step also generates the final test execution scripts.
- - script: $(Build.SourcesDirectory)/src/tests/build$(scriptExt) copynativeonly $(logRootNameArg)Native $(testTreeFilterArg) $(runtimeFlavorArgs) $(crossgenArg) $(buildConfig) $(archType) $(priorityArg) $(librariesOverrideArg) $(codeFlowEnforcementArg)
+ - script: $(Build.SourcesDirectory)/src/tests/build$(scriptExt) copynativeonly $(logRootNameArg)Native $(testFilterArg) $(runtimeFlavorArgs) $(crossgenArg) $(buildConfig) $(archType) $(priorityArg) $(librariesOverrideArg) $(codeFlowEnforcementArg)
displayName: Copy native test components to test output folder
@@ -244,7 +244,7 @@ jobs:
# Compose the Core_Root folder containing all artifacts needed for running
# CoreCLR tests. This step also compiles the framework using Crossgen2
# in ReadyToRun jobs.
- - script: $(Build.SourcesDirectory)/src/tests/build$(scriptExt) generatelayoutonly $(logRootNameArg)Layout $(runtimeFlavorArgs) $(crossgenArg) $(buildConfig) $(archType) $(crossArg) $(priorityArg) $(librariesOverrideArg) $(runtimeVariantArg)
+ - script: $(Build.SourcesDirectory)/src/tests/build$(scriptExt) generatelayoutonly $(logRootNameArg)Layout $(runtimeFlavorArgs) $(crossgenArg) $(buildConfig) $(archType) $(crossArg) $(priorityArg) $(librariesOverrideArg) $(runtimeVariantArg) -ci
displayName: Generate CORE_ROOT
# Build a Mono LLVM AOT cross-compiler for non-amd64 targets (in this case, just arm64)
@@ -539,6 +539,7 @@ jobs:
scenarios:
- jitosr_stress
- jitpartialcompilation_pgo
+ - jitpartialcompilation_pgo_stress_random
- jitoptrepeat
- jitoldlayout
${{ else }}:
@@ -548,6 +549,7 @@ jobs:
- jit_stress_splitting
- jitpartialcompilation
- jitpartialcompilation_pgo
+ - jitpartialcompilation_pgo_stress_random
- jitobjectstackallocation
- jitphysicalpromotion_only
- jitphysicalpromotion_full
@@ -593,6 +595,7 @@ jobs:
- script: $(PythonSetupScript)
displayName: Enable python venv
+ condition: always()
- script: $(PythonScript) $(Build.SourcesDirectory)/src/coreclr/scripts/superpmi.py merge-mch -log_level DEBUG -pattern $(MchFilesLocation)$(CollectionName).$(CollectionType)*.mch -output_mch_path $(MergedMchFileLocation)$(CollectionName).$(CollectionType).$(MchFileTag).mch
displayName: 'Merge $(CollectionName)-$(CollectionType) SuperPMI collections'
@@ -609,28 +612,29 @@ jobs:
displayName: 'Upload artifacts SuperPMI $(CollectionName)-$(CollectionType) collection'
condition: always()
- # Add authenticated pip feed
- - task: PipAuthenticate@1
- displayName: 'Pip Authenticate'
- inputs:
- artifactFeeds: public/dotnet-public-pypi
- onlyAddExtraIndex: false
- condition: always()
+ - ${{ if eq(variables['System.TeamProject'], 'internal') }}:
+ # Add authenticated pip feed
+ - task: PipAuthenticate@1
+ displayName: 'Pip Authenticate'
+ inputs:
+ artifactFeeds: public/dotnet-public-pypi
+ onlyAddExtraIndex: false
+ condition: always()
- # Ensure the Python azure-storage-blob package is installed before doing the upload.
- - script: $(PipScript) install --upgrade pip && $(PipScript) install azure.storage.blob==12.5.0 --force-reinstall && $(PipScript) install azure.identity==1.16.1 --force-reinstall
- displayName: Upgrade Pip to latest and install azure-storage-blob and azure-identity Python packages
- condition: always()
+ # Ensure the Python azure-storage-blob package is installed before doing the upload.
+ - script: $(PipScript) install --upgrade pip && $(PipScript) install azure.storage.blob==12.5.0 --force-reinstall && $(PipScript) install azure.identity==1.16.1 --force-reinstall
+ displayName: Upgrade Pip to latest and install azure-storage-blob and azure-identity Python packages
+ condition: always()
- - task: AzureCLI@2
- displayName: 'Upload SuperPMI $(CollectionName)-$(CollectionType) collection to Azure Storage'
- inputs:
- azureSubscription: 'superpmi-collect-rw'
- scriptType: 'pscore'
- scriptLocation: 'inlineScript'
- inlineScript: |
- $(PythonScript) $(Build.SourcesDirectory)/src/coreclr/scripts/superpmi.py upload -log_level DEBUG -arch $(archType) -build_type $(buildConfig) -mch_files $(MergedMchFileLocation)$(CollectionName).$(CollectionType).$(MchFileTag).mch -core_root $(Build.SourcesDirectory)/artifacts/bin/coreclr/$(osGroup).x64.$(buildConfigUpper)
- condition: always()
+ - task: AzureCLI@2
+ displayName: 'Upload SuperPMI $(CollectionName)-$(CollectionType) collection to Azure Storage'
+ inputs:
+ azureSubscription: 'superpmi-collect-rw'
+ scriptType: 'pscore'
+ scriptLocation: 'inlineScript'
+ inlineScript: |
+ $(PythonScript) $(Build.SourcesDirectory)/src/coreclr/scripts/superpmi.py upload -log_level DEBUG -arch $(archType) -build_type $(buildConfig) -mch_files $(MergedMchFileLocation)$(CollectionName).$(CollectionType).$(MchFileTag).mch -core_root $(Build.SourcesDirectory)/artifacts/bin/coreclr/$(osGroup).x64.$(buildConfigUpper)
+ condition: always()
- task: CopyFiles@2
displayName: Copying superpmi.log of all partitions
diff --git a/eng/pipelines/common/variables.yml b/eng/pipelines/common/variables.yml
index 793b66bb660..d51d565362e 100644
--- a/eng/pipelines/common/variables.yml
+++ b/eng/pipelines/common/variables.yml
@@ -58,6 +58,4 @@ variables:
ne(variables['isExtraPlatformsBuild'], true),
eq(variables['isRollingBuild'], true))) ]
-- template: /eng/pipelines/common/perf-variables.yml
-
-- template: /eng/common/${{ parameters.templatePath }}/variables/pool-providers.yml
\ No newline at end of file
+- template: /eng/common/${{ parameters.templatePath }}/variables/pool-providers.yml
diff --git a/eng/pipelines/common/xplat-setup.yml b/eng/pipelines/common/xplat-setup.yml
index f50a2db9e81..6b1804c9afc 100644
--- a/eng/pipelines/common/xplat-setup.yml
+++ b/eng/pipelines/common/xplat-setup.yml
@@ -172,10 +172,10 @@ jobs:
${{ if and(in(parameters.osGroup, 'osx', 'maccatalyst', 'ios', 'iossimulator', 'tvos', 'tvossimulator'), eq(variables['System.TeamProject'], 'public')) }}:
vmImage: 'macos-12'
- # OSX Internal Pool
+ # Official build OSX pool
${{ if and(in(parameters.osGroup, 'osx', 'maccatalyst', 'ios', 'iossimulator', 'tvos', 'tvossimulator'), ne(variables['System.TeamProject'], 'public')) }}:
name: "Azure Pipelines"
- vmImage: 'macOS-12'
+ vmImage: 'macos-latest-internal'
os: macOS
# Official Build Windows Pool
diff --git a/eng/pipelines/coreclr/exploratory.yml b/eng/pipelines/coreclr/exploratory.yml
index 2ba84590f5e..7b094398a0f 100644
--- a/eng/pipelines/coreclr/exploratory.yml
+++ b/eng/pipelines/coreclr/exploratory.yml
@@ -40,7 +40,7 @@ extends:
buildArgs: -s clr+libs -c $(_BuildConfig) -lc Release
timeoutInMinutes: 360
postBuildSteps:
- - script: $(Build.SourcesDirectory)/src/tests/build$(scriptExt) $(_BuildConfig) $(archType) generatelayoutonly
+ - script: $(Build.SourcesDirectory)/src/tests/build$(scriptExt) $(_BuildConfig) $(archType) $(crossArg) generatelayoutonly /p:UsePublishedCrossgen2=false
displayName: Create Core_Root
condition: succeeded()
- template: /eng/pipelines/coreclr/templates/jit-exploratory-steps.yml
diff --git a/eng/pipelines/coreclr/perf-non-wasm-jobs.yml b/eng/pipelines/coreclr/perf-non-wasm-jobs.yml
index 8419e99f7ba..f750bda72f9 100644
--- a/eng/pipelines/coreclr/perf-non-wasm-jobs.yml
+++ b/eng/pipelines/coreclr/perf-non-wasm-jobs.yml
@@ -1,3 +1,6 @@
+parameters:
+ perfBranch: 'main'
+
jobs:
- ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'Schedule')) }}:
@@ -100,6 +103,7 @@ jobs:
- template: /eng/pipelines/coreclr/templates/build-and-run-perf-ios-scenarios.yml
parameters:
hybridGlobalization: True
+ perfBranch: ${{ parameters.perfBranch }}
# run android scenarios
- template: /eng/pipelines/common/platform-matrix.yml
@@ -112,10 +116,11 @@ jobs:
jobParameters:
testGroup: perf
runtimeType: AndroidMono
- projectFile: android_scenarios.proj
+ projectFile: $(Build.SourcesDirectory)/eng/testing/performance/android_scenarios.proj
runKind: android_scenarios
runJobTemplate: /eng/pipelines/coreclr/templates/run-scenarios-job.yml
logicalmachine: 'perfpixel4a'
+ perfBranch: ${{ parameters.perfBranch }}
# run mono microbenchmarks perf job
- template: /eng/pipelines/common/platform-matrix.yml
@@ -129,10 +134,11 @@ jobs:
testGroup: perf
liveLibrariesBuildConfig: Release
runtimeType: mono
- projectFile: microbenchmarks.proj
+ projectFile: $(Agent.BuildDirectory)/performance/eng/performance/helix.proj
runKind: micro_mono
runJobTemplate: /eng/pipelines/coreclr/templates/run-performance-job.yml
logicalmachine: 'perftiger'
+ perfBranch: ${{ parameters.perfBranch }}
# run mono interpreter perf job
- template: /eng/pipelines/common/platform-matrix.yml
@@ -147,10 +153,11 @@ jobs:
liveLibrariesBuildConfig: Release
runtimeType: mono
codeGenType: 'Interpreter'
- projectFile: microbenchmarks.proj
+ projectFile: $(Agent.BuildDirectory)/performance/eng/performance/helix.proj
runKind: micro_mono
runJobTemplate: /eng/pipelines/coreclr/templates/run-performance-job.yml
logicalmachine: 'perftiger'
+ perfBranch: ${{ parameters.perfBranch }}
# run mono aot microbenchmarks perf job
- template: /eng/pipelines/common/platform-matrix.yml
@@ -165,10 +172,11 @@ jobs:
liveLibrariesBuildConfig: Release
runtimeType: mono
codeGenType: 'AOT'
- projectFile: microbenchmarks.proj
+ projectFile: $(Agent.BuildDirectory)/performance/eng/performance/helix.proj
runKind: micro_mono
runJobTemplate: /eng/pipelines/coreclr/templates/run-performance-job.yml
logicalmachine: 'perftiger'
+ perfBranch: ${{ parameters.perfBranch }}
# run coreclr perftiger microbenchmarks perf job
- template: /eng/pipelines/common/platform-matrix.yml
@@ -184,10 +192,11 @@ jobs:
jobParameters:
testGroup: perf
liveLibrariesBuildConfig: Release
- projectFile: microbenchmarks.proj
+ projectFile: $(Agent.BuildDirectory)/performance/eng/performance/helix.proj
runKind: micro
runJobTemplate: /eng/pipelines/coreclr/templates/run-performance-job.yml
logicalmachine: 'perftiger'
+ perfBranch: ${{ parameters.perfBranch }}
# run coreclr perftiger microbenchmarks no dynamic pgo perf jobs
- template: /eng/pipelines/common/platform-matrix.yml
@@ -200,11 +209,12 @@ jobs:
jobParameters:
testGroup: perf
liveLibrariesBuildConfig: Release
- projectFile: microbenchmarks.proj
+ projectFile: $(Agent.BuildDirectory)/performance/eng/performance/helix.proj
runKind: micro
runJobTemplate: /eng/pipelines/coreclr/templates/run-performance-job.yml
logicalmachine: 'perftiger'
pgoRunType: -NoDynamicPGO
+ perfBranch: ${{ parameters.perfBranch }}
- template: /eng/pipelines/common/platform-matrix.yml
parameters:
@@ -216,11 +226,12 @@ jobs:
jobParameters:
testGroup: perf
liveLibrariesBuildConfig: Release
- projectFile: microbenchmarks.proj
+ projectFile: $(Agent.BuildDirectory)/performance/eng/performance/helix.proj
runKind: micro
runJobTemplate: /eng/pipelines/coreclr/templates/run-performance-job.yml
logicalmachine: 'perftiger'
pgoRunType: --nodynamicpgo
+ perfBranch: ${{ parameters.perfBranch }}
# run coreclr perftiger microbenchmarks no R2R perf jobs
- template: /eng/pipelines/common/platform-matrix.yml
@@ -233,11 +244,12 @@ jobs:
jobParameters:
testGroup: perf
liveLibrariesBuildConfig: Release
- projectFile: microbenchmarks.proj
+ projectFile: $(Agent.BuildDirectory)/performance/eng/performance/helix.proj
runKind: micro
runJobTemplate: /eng/pipelines/coreclr/templates/run-performance-job.yml
logicalmachine: 'perftiger'
r2rRunType: -NoR2R
+ perfBranch: ${{ parameters.perfBranch }}
- template: /eng/pipelines/common/platform-matrix.yml
parameters:
@@ -249,11 +261,12 @@ jobs:
jobParameters:
testGroup: perf
liveLibrariesBuildConfig: Release
- projectFile: microbenchmarks.proj
+ projectFile: $(Agent.BuildDirectory)/performance/eng/performance/helix.proj
runKind: micro
runJobTemplate: /eng/pipelines/coreclr/templates/run-performance-job.yml
logicalmachine: 'perftiger'
r2rRunType: --nor2r
+ perfBranch: ${{ parameters.perfBranch }}
# run coreclr perfowl microbenchmarks perf job
- template: /eng/pipelines/common/platform-matrix.yml
@@ -267,10 +280,11 @@ jobs:
jobParameters:
testGroup: perf
liveLibrariesBuildConfig: Release
- projectFile: microbenchmarks.proj
+ projectFile: $(Agent.BuildDirectory)/performance/eng/performance/helix.proj
runKind: micro
runJobTemplate: /eng/pipelines/coreclr/templates/run-performance-job.yml
logicalmachine: 'perfowl'
+ perfBranch: ${{ parameters.perfBranch }}
# run coreclr perfviper microbenchmarks perf job
- template: /eng/pipelines/common/platform-matrix.yml
@@ -284,10 +298,11 @@ jobs:
jobParameters:
testGroup: perf
liveLibrariesBuildConfig: Release
- projectFile: microbenchmarks.proj
+ projectFile: $(Agent.BuildDirectory)/performance/eng/performance/helix.proj
runKind: micro
runJobTemplate: /eng/pipelines/coreclr/templates/run-performance-job.yml
logicalmachine: 'perfviper'
+ perfBranch: ${{ parameters.perfBranch }}
# run coreclr perfviper microbenchmarks perf jitoptrepeat jobs
- template: /eng/pipelines/common/platform-matrix.yml
@@ -301,11 +316,12 @@ jobs:
jobParameters:
testGroup: perf
liveLibrariesBuildConfig: Release
- projectFile: microbenchmarks.proj
+ projectFile: $(Agent.BuildDirectory)/performance/eng/performance/helix.proj
runKind: micro
runJobTemplate: /eng/pipelines/coreclr/templates/run-performance-job.yml
logicalmachine: 'perfviper'
experimentName: 'jitoptrepeat'
+ perfBranch: ${{ parameters.perfBranch }}
# run coreclr crossgen perf job
- template: /eng/pipelines/common/platform-matrix.yml
@@ -319,10 +335,11 @@ jobs:
jobParameters:
testGroup: perf
liveLibrariesBuildConfig: Release
- projectFile: crossgen_perf.proj
+ projectFile: $(Build.SourcesDirectory)/eng/testing/performance/crossgen_perf.proj
runKind: crossgen_scenarios
runJobTemplate: /eng/pipelines/coreclr/templates/run-scenarios-job.yml
logicalmachine: 'perftiger_crossgen'
+ perfBranch: ${{ parameters.perfBranch }}
# build mono runtime packs
- template: /eng/pipelines/common/platform-matrix.yml
@@ -368,3 +385,4 @@ jobs:
archiveExtension: '.tar.gz'
archiveType: tar
tarCompression: gz
+ perfBranch: ${{ parameters.perfBranch }}
diff --git a/eng/pipelines/coreclr/perf-wasm-jobs.yml b/eng/pipelines/coreclr/perf-wasm-jobs.yml
index 3f33e90cbc8..93ffdfc5283 100644
--- a/eng/pipelines/coreclr/perf-wasm-jobs.yml
+++ b/eng/pipelines/coreclr/perf-wasm-jobs.yml
@@ -7,7 +7,7 @@ parameters:
onlySanityCheck: false
downloadSpecificBuild: null # { buildId, pipeline, branchName, project }
collectHelixLogsScript: ''
- perfForkToUse: {} # url, branch
+ perfBranch: 'main'
jobs:
@@ -43,14 +43,14 @@ jobs:
liveLibrariesBuildConfig: Release
runtimeType: wasm
codeGenType: 'wasm'
- projectFile: microbenchmarks.proj
+ projectFile: $(Agent.BuildDirectory)/performance/eng/performance/helix.proj
runKind: micro
runJobTemplate: /eng/pipelines/coreclr/templates/run-performance-job.yml
logicalmachine: 'perftiger'
javascriptEngine: 'javascriptcore'
collectHelixLogsScript: ${{ parameters.collectHelixLogsScript }}
downloadSpecificBuild: ${{ parameters.downloadSpecificBuild }}
- perfForkToUse: ${{ parameters.perfForkToUse }}
+ perfBranch: ${{ parameters.perfBranch }}
#downloadSpecificBuild:
#buildId: '1693181'
#pipeline: 'perf-wasm'
@@ -69,14 +69,14 @@ jobs:
livelibrariesbuildconfig: Release
runtimetype: wasm
codegentype: 'aot'
- projectfile: microbenchmarks.proj
+ projectFile: $(Agent.BuildDirectory)/performance/eng/performance/helix.proj
runkind: micro
runjobtemplate: /eng/pipelines/coreclr/templates/run-performance-job.yml
logicalmachine: 'perftiger'
javascriptengine: 'javascriptcore'
collectHelixLogsScript: ${{ parameters.collectHelixLogsScript }}
downloadSpecificBuild: ${{ parameters.downloadSpecificBuild }}
- perfForkToUse: ${{ parameters.perfForkToUse }}
+ perfBranch: ${{ parameters.perfBranch }}
- ${{ if eq(parameters.runProfile, 'v8') }}:
- ${{ if eq(parameters.downloadSpecificBuild.buildId, '') }}:
@@ -110,7 +110,7 @@ jobs:
liveLibrariesBuildConfig: Release
runtimeType: wasm
codeGenType: 'wasm'
- projectFile: microbenchmarks.proj
+ projectFile: $(Agent.BuildDirectory)/performance/eng/performance/helix.proj
runKind: micro
runJobTemplate: /eng/pipelines/coreclr/templates/run-performance-job.yml
logicalmachine: 'perftiger'
@@ -121,7 +121,7 @@ jobs:
compare: ${{ parameters.compare }}
onlySanityCheck: ${{ parameters.onlySanityCheck }}
downloadSpecificBuild: ${{ parameters.downloadSpecificBuild }}
- perfForkToUse: ${{ parameters.perfForkToUse }}
+ perfBranch: ${{ parameters.perfBranch }}
#run mono wasm aot microbenchmarks perf job
# Disabled for runtime-wasm-perf on PRs due to https://github.com/dotnet/runtime/issues/95101
@@ -138,7 +138,7 @@ jobs:
livelibrariesbuildconfig: Release
runtimetype: wasm
codegentype: 'aot'
- projectfile: microbenchmarks.proj
+ projectFile: $(Agent.BuildDirectory)/performance/eng/performance/helix.proj
runkind: micro
runjobtemplate: /eng/pipelines/coreclr/templates/run-performance-job.yml
logicalmachine: 'perftiger'
@@ -149,7 +149,7 @@ jobs:
compare: ${{ parameters.compare }}
onlySanityCheck: ${{ parameters.onlySanityCheck }}
downloadSpecificBuild: ${{ parameters.downloadSpecificBuild }}
- perfForkToUse: ${{ parameters.perfForkToUse }}
+ perfBranch: ${{ parameters.perfBranch }}
# run mono wasm blazor perf job
- template: /eng/pipelines/common/platform-matrix.yml
@@ -163,14 +163,14 @@ jobs:
testGroup: perf
liveLibrariesBuildConfig: Release
runtimeType: wasm
- projectFile: blazor_perf.proj
+ projectFile: $(Build.SourcesDirectory)/eng/testing/performance/blazor_perf.proj
runKind: blazor_scenarios
runJobTemplate: /eng/pipelines/coreclr/templates/run-scenarios-job.yml
# For working with a newer sdk, and previous tfm (eg. 9.0 sdk, and net8.0 tfm)
- #additionalSetupParameters: '--dotnetversions 8.0.0' # passed to performance-setup.sh
+ #additionalSetupParameters: '--dotnetversions 8.0.0' # passed to run-performance-job.py
logicalmachine: 'perftiger'
downloadSpecificBuild: ${{ parameters.downloadSpecificBuild }}
- perfForkToUse: ${{ parameters.perfForkToUse }}
+ perfBranch: ${{ parameters.perfBranch }}
- ${{if or(and(ne(variables['System.TeamProject'], 'public'), in(variables['Build.Reason'], 'Schedule')), in(variables['Build.DefinitionName'], 'runtime-wasm-perf')) }}:
# run mono wasm blazor perf job
@@ -185,12 +185,12 @@ jobs:
testGroup: perf
liveLibrariesBuildConfig: Release
runtimeType: wasm
- projectFile: blazor_perf.proj
+ projectFile: $(Build.SourcesDirectory)/eng/testing/performance/blazor_perf.proj
runKind: blazor_scenarios
runJobTemplate: /eng/pipelines/coreclr/templates/run-scenarios-job.yml
# For working with a newer sdk, and previous tfm (eg. 9.0 sdk, and net8.0 tfm)
- #additionalSetupParameters: '--dotnetversions 8.0.0' # passed to performance-setup.sh
+ #additionalSetupParameters: '--dotnetversions 8.0.0' # passed to run-performance-job.py
logicalmachine: 'perftiger'
downloadSpecificBuild: ${{ parameters.downloadSpecificBuild }}
- perfForkToUse: ${{ parameters.perfForkToUse }}
hybridGlobalization: True
+ perfBranch: ${{ parameters.perfBranch }}
diff --git a/eng/pipelines/coreclr/perf.yml b/eng/pipelines/coreclr/perf.yml
index 3d7ee810670..9b595f3dd8c 100644
--- a/eng/pipelines/coreclr/perf.yml
+++ b/eng/pipelines/coreclr/perf.yml
@@ -1,3 +1,9 @@
+parameters:
+- name: perfBranch
+ displayName: Performance Repo Branch
+ type: string
+ default: 'main'
+
trigger:
batch: true
branches:
@@ -19,7 +25,7 @@ trigger:
- THIRD-PARTY-NOTICES.TXT
variables:
- - template: /eng/pipelines/common/variables.yml
+- template: /eng/pipelines/common/variables.yml
#
# For the 'schedule' case, only wasm/jsc perf jobs are run.
@@ -46,9 +52,12 @@ extends:
- template: /eng/pipelines/coreclr/perf-wasm-jobs.yml
parameters:
collectHelixLogsScript: ${{ variables._wasmCollectHelixLogsScript }}
+ perfBranch: ${{ parameters.perfBranch }}
#${{ and(ne(variables['System.TeamProject'], 'public'), in(variables['Build.Reason'], 'Schedule')) }}:
# runProfile: 'non-v8'
${{ if ne(variables['System.TeamProject'], 'public') }}:
runProfile: 'v8'
- template: /eng/pipelines/coreclr/perf-non-wasm-jobs.yml
+ parameters:
+ perfBranch: ${{ parameters.perfBranch }}
diff --git a/eng/pipelines/coreclr/perf_slow.yml b/eng/pipelines/coreclr/perf_slow.yml
index 6bdeb08b1e3..e7d073534a7 100644
--- a/eng/pipelines/coreclr/perf_slow.yml
+++ b/eng/pipelines/coreclr/perf_slow.yml
@@ -5,6 +5,10 @@ parameters:
- name: runScheduledJobs
type: boolean
default: false
+- name: perfBranch
+ displayName: Performance Repo Branch
+ type: string
+ default: 'main'
trigger:
batch: true
@@ -80,11 +84,31 @@ extends:
liveLibrariesBuildConfig: Release
runtimeType: mono
codeGenType: 'Interpreter'
- projectFile: microbenchmarks.proj
+ projectFile: $(Agent.BuildDirectory)/performance/eng/performance/helix.proj
runKind: micro_mono
runJobTemplate: /eng/pipelines/coreclr/templates/run-performance-job.yml
logicalmachine: 'perfampere'
timeoutInMinutes: 720
+ perfBranch: ${{ parameters.perfBranch }}
+
+ # run arm64 jit jobs for mono
+ - template: /eng/pipelines/common/platform-matrix.yml
+ parameters:
+ jobTemplate: /eng/pipelines/coreclr/templates/perf-job.yml
+ buildConfig: release
+ runtimeFlavor: mono
+ platforms:
+ - linux_arm64
+ jobParameters:
+ testGroup: perf
+ liveLibrariesBuildConfig: Release
+ runtimeType: mono
+ projectFile: $(Agent.BuildDirectory)/performance/eng/performance/helix.proj
+ runKind: micro_mono
+ runJobTemplate: /eng/pipelines/coreclr/templates/run-performance-job.yml
+ logicalmachine: 'perfampere'
+ timeoutInMinutes: 720
+ perfBranch: ${{ parameters.perfBranch }}
- ${{ if and(ne(variables['System.TeamProject'], 'public'), or(notin(variables['Build.Reason'], 'Schedule', 'Manual'), parameters.runPrivateJobs)) }}:
@@ -149,11 +173,12 @@ extends:
liveLibrariesBuildConfig: Release
runtimeType: mono
codeGenType: 'AOT'
- projectFile: microbenchmarks.proj
+ projectFile: $(Agent.BuildDirectory)/performance/eng/performance/helix.proj
runKind: micro_mono
runJobTemplate: /eng/pipelines/coreclr/templates/run-performance-job.yml
logicalmachine: 'perfampere'
timeoutInMinutes: 780
+ perfBranch: ${{ parameters.perfBranch }}
# run coreclr Linux arm64 ampere microbenchmarks perf job
- template: /eng/pipelines/common/platform-matrix.yml
@@ -166,11 +191,12 @@ extends:
jobParameters:
testGroup: perf
liveLibrariesBuildConfig: Release
- projectFile: microbenchmarks.proj
+ projectFile: $(Agent.BuildDirectory)/performance/eng/performance/helix.proj
runKind: micro
runJobTemplate: /eng/pipelines/coreclr/templates/run-performance-job.yml
logicalmachine: 'perfampere'
timeoutInMinutes: 780
+ perfBranch: ${{ parameters.perfBranch }}
#run coreclr Linux arm64 ampere no dynamic pgo microbenchmarks perf job
- template: /eng/pipelines/common/platform-matrix.yml
@@ -183,12 +209,13 @@ extends:
jobParameters:
testGroup: perf
liveLibrariesBuildConfig: Release
- projectFile: microbenchmarks.proj
+ projectFile: $(Agent.BuildDirectory)/performance/eng/performance/helix.proj
runKind: micro
runJobTemplate: /eng/pipelines/coreclr/templates/run-performance-job.yml
logicalmachine: 'perfampere'
timeoutInMinutes: 780
pgoRunType: --nodynamicpgo
+ perfBranch: ${{ parameters.perfBranch }}
#run coreclr Linux arm64 ampere no R2R microbenchmarks perf job
- template: /eng/pipelines/common/platform-matrix.yml
@@ -201,12 +228,13 @@ extends:
jobParameters:
testGroup: perf
liveLibrariesBuildConfig: Release
- projectFile: microbenchmarks.proj
+ projectFile: $(Agent.BuildDirectory)/performance/eng/performance/helix.proj
runKind: micro
runJobTemplate: /eng/pipelines/coreclr/templates/run-performance-job.yml
logicalmachine: 'perfampere'
timeoutInMinutes: 780
r2rRunType: --nor2r
+ perfBranch: ${{ parameters.perfBranch }}
# run coreclr Windows arm64 microbenchmarks perf job
- template: /eng/pipelines/common/platform-matrix.yml
@@ -219,10 +247,11 @@ extends:
jobParameters:
testGroup: perf
liveLibrariesBuildConfig: Release
- projectFile: microbenchmarks.proj
+ projectFile: $(Agent.BuildDirectory)/performance/eng/performance/helix.proj
runKind: micro
runJobTemplate: /eng/pipelines/coreclr/templates/run-performance-job.yml
logicalmachine: 'perfsurf'
+ perfBranch: ${{ parameters.perfBranch }}
# run coreclr Windows arm64 ampere microbenchmarks perf job
- template: /eng/pipelines/common/platform-matrix.yml
@@ -235,11 +264,12 @@ extends:
jobParameters:
testGroup: perf
liveLibrariesBuildConfig: Release
- projectFile: microbenchmarks.proj
+ projectFile: $(Agent.BuildDirectory)/performance/eng/performance/helix.proj
runKind: micro
runJobTemplate: /eng/pipelines/coreclr/templates/run-performance-job.yml
logicalmachine: 'perfampere'
timeoutInMinutes: 780
+ perfBranch: ${{ parameters.perfBranch }}
# run coreclr Windows arm64 ampere no dynamic pgo microbenchmarks perf job
- template: /eng/pipelines/common/platform-matrix.yml
@@ -252,12 +282,13 @@ extends:
jobParameters:
testGroup: perf
liveLibrariesBuildConfig: Release
- projectFile: microbenchmarks.proj
+ projectFile: $(Agent.BuildDirectory)/performance/eng/performance/helix.proj
runKind: micro
runJobTemplate: /eng/pipelines/coreclr/templates/run-performance-job.yml
logicalmachine: 'perfampere'
pgoRunType: -NoDynamicPGO
timeoutInMinutes: 780
+ perfBranch: ${{ parameters.perfBranch }}
# run coreclr Windows arm64 ampere no R2R microbenchmarks perf job
- template: /eng/pipelines/common/platform-matrix.yml
@@ -270,12 +301,13 @@ extends:
jobParameters:
testGroup: perf
liveLibrariesBuildConfig: Release
- projectFile: microbenchmarks.proj
+ projectFile: $(Agent.BuildDirectory)/performance/eng/performance/helix.proj
runKind: micro
runJobTemplate: /eng/pipelines/coreclr/templates/run-performance-job.yml
logicalmachine: 'perfampere'
r2rRunType: -NoR2R
timeoutInMinutes: 780
+ perfBranch: ${{ parameters.perfBranch }}
# run coreclr cloudvm microbenchmarks perf job
# this run is added temporarily for measuring AVX-512 performance
@@ -290,10 +322,11 @@ extends:
jobParameters:
testGroup: perf
liveLibrariesBuildConfig: Release
- projectFile: microbenchmarks.proj
+ projectFile: $(Agent.BuildDirectory)/performance/eng/performance/helix.proj
runKind: micro
runJobTemplate: /eng/pipelines/coreclr/templates/run-performance-job.yml
logicalmachine: 'cloudvm'
+ perfBranch: ${{ parameters.perfBranch }}
# Uncomment once we fix https://github.com/dotnet/performance/issues/1950
# # run coreclr linux crossgen perf job
@@ -307,10 +340,11 @@ extends:
# jobParameters:
# testGroup: perf
# liveLibrariesBuildConfig: Release
- # projectFile: crossgen_perf.proj
+ # projectFile: $(Build.SourcesDirectory)/eng/testing/performance/crossgen_perf.proj
# runKind: crossgen_scenarios
# runJobTemplate: /eng/pipelines/coreclr/templates/run-scenarios-job.yml
# logicalmachine: 'perfa64'
+ # perfBranch: ${{ parameters.perfBranch }}
# # run coreclr windows crossgen perf job
# - template: /eng/pipelines/common/platform-matrix.yml
@@ -323,7 +357,8 @@ extends:
# jobParameters:
# testGroup: perf
# liveLibrariesBuildConfig: Release
- # projectFile: crossgen_perf.proj
+ # projectFile: $(Build.SourcesDirectory)/eng/testing/performance/crossgen_perf.proj
# runKind: crossgen_scenarios
# runJobTemplate: /eng/pipelines/coreclr/templates/run-scenarios-job.yml
# logicalmachine: 'perfsurf'
+ # perfBranch: ${{ parameters.perfBranch }}
diff --git a/eng/pipelines/coreclr/runtime-nativeaot-outerloop.yml b/eng/pipelines/coreclr/runtime-nativeaot-outerloop.yml
index 494601a890d..c39c6d81a45 100644
--- a/eng/pipelines/coreclr/runtime-nativeaot-outerloop.yml
+++ b/eng/pipelines/coreclr/runtime-nativeaot-outerloop.yml
@@ -156,7 +156,6 @@ extends:
#
# CoreCLR NativeAOT checked build and Pri0 tests
- # Only when CoreCLR is changed
#
- template: /eng/pipelines/common/platform-matrix.yml
parameters:
@@ -189,3 +188,37 @@ extends:
parameters:
testGroup: innerloop
liveLibrariesBuildConfig: Release
+
+ #
+ # CoreCLR NativeAOT checked build and Pri0 tests
+ # Test windows_x64 with CET and CFG
+ #
+ - template: /eng/pipelines/common/platform-matrix.yml
+ parameters:
+ jobTemplate: /eng/pipelines/common/global-build-job.yml
+ helixQueuesTemplate: /eng/pipelines/coreclr/templates/helix-queues-setup.yml
+ helixQueueGroup: cet
+ buildConfig: Checked
+ platforms:
+ - windows_x64
+ variables:
+ - name: timeoutPerTestInMinutes
+ value: 60
+ - name: timeoutPerTestCollectionInMinutes
+ value: 180
+ jobParameters:
+ timeoutInMinutes: 300 # doesn't normally take this long, but we have had Helix queues backed up for over an hour
+ nameSuffix: NativeAOT_Pri0_CET_CFG
+ buildArgs: -s clr.aot+libs -rc $(_BuildConfig) -lc Release /p:RunAnalyzers=false
+ postBuildSteps:
+ - template: /eng/pipelines/coreclr/nativeaot-post-build-steps.yml
+ parameters:
+ creator: dotnet-bot
+ testBuildArgs: 'nativeaot /p:IlcUseServerGc=false /p:ControlFlowGuard=Guard'
+ liveLibrariesBuildConfig: Release
+ testRunNamePrefixSuffix: NativeAOT_Pri0_CET_CFG_$(_BuildConfig)
+ extraVariablesTemplates:
+ - template: /eng/pipelines/common/templates/runtimes/test-variables.yml
+ parameters:
+ testGroup: innerloop
+ liveLibrariesBuildConfig: Release
\ No newline at end of file
diff --git a/eng/pipelines/coreclr/superpmi-collect-test.yml b/eng/pipelines/coreclr/superpmi-collect-test.yml
new file mode 100644
index 00000000000..863a79c1b31
--- /dev/null
+++ b/eng/pipelines/coreclr/superpmi-collect-test.yml
@@ -0,0 +1,373 @@
+# This job definition automates the SuperPMI collection process.
+
+trigger: none
+
+variables:
+ - template: /eng/pipelines/common/variables.yml
+
+extends:
+ template: /eng/pipelines/common/templates/pipeline-with-resources.yml
+ parameters:
+ stages:
+ - stage: Build
+ jobs:
+
+ - template: /eng/pipelines/common/platform-matrix.yml
+ parameters:
+ jobTemplate: /eng/pipelines/common/global-build-job.yml
+ buildConfig: checked
+ platforms:
+ - windows_x64
+ - linux_x64
+ jobParameters:
+ testGroup: outerloop
+ buildArgs: -s clr+libs+libs.tests -rc $(_BuildConfig) -c Release /p:ArchiveTests=true
+ timeoutInMinutes: 120
+ postBuildSteps:
+ - template: /eng/pipelines/coreclr/templates/build-native-test-assets-step.yml
+ - template: /eng/pipelines/common/upload-artifact-step.yml
+ parameters:
+ rootFolder: $(Build.SourcesDirectory)/artifacts/bin
+ includeRootFolder: false
+ archiveType: $(archiveType)
+ archiveExtension: $(archiveExtension)
+ tarCompression: $(tarCompression)
+ artifactName: BuildArtifacts_$(osGroup)$(osSubgroup)_$(archType)_$(_BuildConfig)
+ - template: /eng/pipelines/common/upload-artifact-step.yml
+ parameters:
+ rootFolder: $(Build.SourcesDirectory)/artifacts/helix
+ includeRootFolder: false
+ archiveType: $(archiveType)
+ archiveExtension: $(archiveExtension)
+ tarCompression: $(tarCompression)
+ artifactName: LibrariesTestArtifacts_$(osGroup)$(osSubgroup)_$(archType)_$(_BuildConfig)
+ extraVariablesTemplates:
+ - template: /eng/pipelines/common/templates/runtimes/native-test-assets-variables.yml
+ parameters:
+ testGroup: outerloop
+ disableComponentGovernance: true # No shipping artifacts produced by this pipeline
+
+ - template: /eng/pipelines/common/platform-matrix.yml
+ parameters:
+ jobTemplate: /eng/pipelines/common/global-build-job.yml
+ buildConfig: checked
+ platforms:
+ - windows_x86
+ - windows_arm64
+ - osx_arm64
+ jobParameters:
+ testGroup: outerloop
+ buildArgs: -s clr+libs+libs.tests -rc $(_BuildConfig) -c Release /p:ArchiveTests=true
+ timeoutInMinutes: 120
+ postBuildSteps:
+ # Build CLR assets for x64 as well as the target as we need an x64 mcs
+ - template: /eng/pipelines/common/templates/global-build-step.yml
+ parameters:
+ buildArgs: -s clr.spmi -c $(_BuildConfig)
+ archParameter: -arch x64
+ displayName: Build SuperPMI
+ - template: /eng/pipelines/coreclr/templates/build-native-test-assets-step.yml
+ - template: /eng/pipelines/common/upload-artifact-step.yml
+ parameters:
+ rootFolder: $(Build.SourcesDirectory)/artifacts/bin
+ includeRootFolder: false
+ archiveType: $(archiveType)
+ archiveExtension: $(archiveExtension)
+ tarCompression: $(tarCompression)
+ artifactName: BuildArtifacts_$(osGroup)$(osSubgroup)_$(archType)_$(_BuildConfig)
+ - template: /eng/pipelines/common/upload-artifact-step.yml
+ parameters:
+ rootFolder: $(Build.SourcesDirectory)/artifacts/helix
+ includeRootFolder: false
+ archiveType: $(archiveType)
+ archiveExtension: $(archiveExtension)
+ tarCompression: $(tarCompression)
+ artifactName: LibrariesTestArtifacts_$(osGroup)$(osSubgroup)_$(archType)_$(_BuildConfig)
+ extraVariablesTemplates:
+ - template: /eng/pipelines/common/templates/runtimes/native-test-assets-variables.yml
+ parameters:
+ testGroup: outerloop
+ disableComponentGovernance: true # No shipping artifacts produced by this pipeline
+
+ - template: /eng/pipelines/common/platform-matrix.yml
+ parameters:
+ jobTemplate: /eng/pipelines/common/global-build-job.yml
+ buildConfig: checked
+ platforms:
+ - linux_arm
+ - linux_arm64
+ jobParameters:
+ testGroup: outerloop
+ buildArgs: -s clr+libs+libs.tests -rc $(_BuildConfig) -c Release /p:ArchiveTests=true
+ timeoutInMinutes: 120
+ postBuildSteps:
+ # Build CLR assets for x64 as well as the target as we need an x64 mcs
+ - template: /eng/pipelines/common/templates/global-build-step.yml
+ parameters:
+ buildArgs: -s clr.spmi -c $(_BuildConfig)
+ archParameter: -arch x64
+ container: linux_x64
+ displayName: Build SuperPMI
+ - template: /eng/pipelines/coreclr/templates/build-native-test-assets-step.yml
+ - template: /eng/pipelines/common/upload-artifact-step.yml
+ parameters:
+ rootFolder: $(Build.SourcesDirectory)/artifacts/bin
+ includeRootFolder: false
+ archiveType: $(archiveType)
+ archiveExtension: $(archiveExtension)
+ tarCompression: $(tarCompression)
+ artifactName: BuildArtifacts_$(osGroup)$(osSubgroup)_$(archType)_$(_BuildConfig)
+ - template: /eng/pipelines/common/upload-artifact-step.yml
+ parameters:
+ rootFolder: $(Build.SourcesDirectory)/artifacts/helix
+ includeRootFolder: false
+ archiveType: $(archiveType)
+ archiveExtension: $(archiveExtension)
+ tarCompression: $(tarCompression)
+ artifactName: LibrariesTestArtifacts_$(osGroup)$(osSubgroup)_$(archType)_$(_BuildConfig)
+ extraVariablesTemplates:
+ - template: /eng/pipelines/common/templates/runtimes/native-test-assets-variables.yml
+ parameters:
+ testGroup: outerloop
+ disableComponentGovernance: true # No shipping artifacts produced by this pipeline
+
+ - template: /eng/pipelines/common/platform-matrix.yml
+ parameters:
+ jobTemplate: /eng/pipelines/common/templates/runtimes/build-test-job.yml
+ buildConfig: checked
+ platforms:
+ - CoreClrTestBuildHost # Either osx_x64 or linux_x64
+ jobParameters:
+ testGroup: outerloop
+
+ - template: /eng/pipelines/common/platform-matrix.yml
+ parameters:
+ jobTemplate: /eng/pipelines/coreclr/templates/superpmi-collect-job.yml
+ buildConfig: checked
+ platforms:
+ - osx_arm64
+ - linux_arm
+ - linux_arm64
+ - linux_x64
+ - windows_x64
+ - windows_x86
+ - windows_arm64
+ helixQueueGroup: ci
+ helixQueuesTemplate: /eng/pipelines/coreclr/templates/helix-queues-setup.yml
+ jobParameters:
+ testGroup: outerloop
+ liveLibrariesBuildConfig: Release
+ collectionType: pmi
+ collectionName: libraries
+ collectionUpload: false
+
+ - template: /eng/pipelines/common/platform-matrix.yml
+ parameters:
+ jobTemplate: /eng/pipelines/coreclr/templates/superpmi-collect-job.yml
+ buildConfig: checked
+ platforms:
+ - osx_arm64
+ - linux_arm
+ - linux_arm64
+ - linux_x64
+ - windows_x64
+ - windows_x86
+ - windows_arm64
+ helixQueueGroup: ci
+ helixQueuesTemplate: /eng/pipelines/coreclr/templates/helix-queues-setup.yml
+ jobParameters:
+ testGroup: outerloop
+ liveLibrariesBuildConfig: Release
+ collectionType: crossgen2
+ collectionName: libraries
+ collectionUpload: false
+
+ - template: /eng/pipelines/common/platform-matrix.yml
+ parameters:
+ jobTemplate: /eng/pipelines/coreclr/templates/superpmi-collect-job.yml
+ buildConfig: checked
+ platforms:
+ - osx_arm64
+ - linux_arm
+ - linux_arm64
+ - linux_x64
+ - windows_x64
+ - windows_x86
+ - windows_arm64
+ helixQueueGroup: ci
+ helixQueuesTemplate: /eng/pipelines/coreclr/templates/helix-queues-setup.yml
+ jobParameters:
+ testGroup: outerloop
+ liveLibrariesBuildConfig: Release
+ collectionType: run
+ collectionName: realworld
+ collectionUpload: false
+
+ - template: /eng/pipelines/common/platform-matrix.yml
+ parameters:
+ jobTemplate: /eng/pipelines/coreclr/templates/superpmi-collect-job.yml
+ buildConfig: checked
+ platforms:
+ - osx_arm64
+ - linux_arm
+ - linux_arm64
+ - linux_x64
+ - windows_x64
+ - windows_x86
+ - windows_arm64
+ helixQueueGroup: ci
+ helixQueuesTemplate: /eng/pipelines/coreclr/templates/helix-queues-setup.yml
+ jobParameters:
+ testGroup: outerloop
+ liveLibrariesBuildConfig: Release
+ collectionType: run
+ collectionName: benchmarks
+ collectionUpload: false
+
+ - template: /eng/pipelines/common/platform-matrix.yml
+ parameters:
+ jobTemplate: /eng/pipelines/coreclr/templates/superpmi-collect-job.yml
+ buildConfig: checked
+ platforms:
+ - osx_arm64
+ - linux_arm
+ - linux_arm64
+ - linux_x64
+ - windows_x64
+ - windows_x86
+ - windows_arm64
+ helixQueueGroup: ci
+ helixQueuesTemplate: /eng/pipelines/coreclr/templates/helix-queues-setup.yml
+ jobParameters:
+ testGroup: outerloop
+ liveLibrariesBuildConfig: Release
+ collectionType: run_tiered
+ collectionName: benchmarks
+ collectionUpload: false
+
+ - template: /eng/pipelines/common/platform-matrix.yml
+ parameters:
+ jobTemplate: /eng/pipelines/coreclr/templates/superpmi-collect-job.yml
+ buildConfig: checked
+ platforms:
+ - osx_arm64
+ - linux_arm
+ - linux_arm64
+ - linux_x64
+ - windows_x64
+ - windows_x86
+ - windows_arm64
+ helixQueueGroup: ci
+ helixQueuesTemplate: /eng/pipelines/coreclr/templates/helix-queues-setup.yml
+ jobParameters:
+ testGroup: outerloop
+ liveLibrariesBuildConfig: Release
+ collectionType: run_pgo
+ collectionName: benchmarks
+ collectionUpload: false
+
+ #
+ # Collection of coreclr test run
+ #
+ - template: /eng/pipelines/common/platform-matrix.yml
+ parameters:
+ jobTemplate: /eng/pipelines/common/templates/runtimes/run-test-job.yml
+ buildConfig: checked
+ platforms:
+ - osx_arm64
+ - linux_arm
+ - linux_arm64
+ - linux_x64
+ - windows_x64
+ - windows_x86
+ - windows_arm64
+ helixQueueGroup: superpmi
+ helixQueuesTemplate: /eng/pipelines/coreclr/templates/helix-queues-setup.yml
+ jobParameters:
+ testGroup: outerloop
+ liveLibrariesBuildConfig: Release
+ SuperPmiCollect: true
+ SuperPmiCollectionUpload: false
+ unifiedArtifactsName: BuildArtifacts_$(osGroup)$(osSubgroup)_$(archType)_$(_BuildConfig)
+
+ - template: /eng/pipelines/common/platform-matrix.yml
+ parameters:
+ jobTemplate: /eng/pipelines/coreclr/templates/superpmi-collect-job.yml
+ buildConfig: checked
+ platforms:
+ - linux_arm64
+ - linux_x64
+ - windows_x64
+ - windows_arm64
+ helixQueueGroup: ci
+ helixQueuesTemplate: /eng/pipelines/coreclr/templates/helix-queues-setup.yml
+ jobParameters:
+ testGroup: outerloop
+ liveLibrariesBuildConfig: Release
+ collectionType: nativeaot
+ collectionName: smoke_tests
+ collectionUpload: false
+
+ #
+ # Collection of libraries test run: normal
+ # Libraries Test Run using Release libraries, and Checked CoreCLR
+ #
+ - template: /eng/pipelines/common/platform-matrix.yml
+ parameters:
+ jobTemplate: /eng/pipelines/libraries/run-test-job.yml
+ buildConfig: Release
+ platforms:
+ - osx_arm64
+ - linux_arm
+ - linux_arm64
+ - linux_x64
+ - windows_x64
+ - windows_x86
+ - windows_arm64
+ helixQueueGroup: superpmi
+ helixQueuesTemplate: /eng/pipelines/coreclr/templates/helix-queues-setup.yml
+ jobParameters:
+ testScope: innerloop
+ liveRuntimeBuildConfig: Checked
+ dependsOnTestBuildConfiguration: Release
+ dependsOnTestArchitecture: x64
+ scenarios:
+ - normal
+ SuperPmiCollect: true
+ SuperPmiCollectionName: libraries_tests
+ SuperPmiCollectionUpload: false
+ unifiedArtifactsName: BuildArtifacts_$(osGroup)$(osSubgroup)_$(archType)_Checked
+ helixArtifactsName: LibrariesTestArtifacts_$(osGroup)$(osSubgroup)_$(archType)_Checked
+ unifiedBuildConfigOverride: checked
+
+ #
+ # Collection of libraries test run: no_tiered_compilation
+ # Libraries Test Run using Release libraries, and Checked CoreCLR
+ #
+ - template: /eng/pipelines/common/platform-matrix.yml
+ parameters:
+ jobTemplate: /eng/pipelines/libraries/run-test-job.yml
+ buildConfig: Release
+ platforms:
+ - osx_arm64
+ - linux_arm
+ - linux_arm64
+ - linux_x64
+ - windows_x64
+ - windows_x86
+ - windows_arm64
+ helixQueueGroup: superpmi
+ helixQueuesTemplate: /eng/pipelines/coreclr/templates/helix-queues-setup.yml
+ jobParameters:
+ testScope: innerloop
+ liveRuntimeBuildConfig: Checked
+ dependsOnTestBuildConfiguration: Release
+ dependsOnTestArchitecture: x64
+ scenarios:
+ - no_tiered_compilation
+ SuperPmiCollect: true
+ SuperPmiCollectionName: libraries_tests_no_tiered_compilation
+ SuperPmiCollectionUpload: false
+ unifiedArtifactsName: BuildArtifacts_$(osGroup)$(osSubgroup)_$(archType)_Checked
+ helixArtifactsName: LibrariesTestArtifacts_$(osGroup)$(osSubgroup)_$(archType)_Checked
+ unifiedBuildConfigOverride: checked
diff --git a/eng/pipelines/coreclr/superpmi-collect.yml b/eng/pipelines/coreclr/superpmi-collect.yml
index ea5b57d5f51..3530ff5a958 100644
--- a/eng/pipelines/coreclr/superpmi-collect.yml
+++ b/eng/pipelines/coreclr/superpmi-collect.yml
@@ -86,6 +86,7 @@ extends:
parameters:
buildArgs: -s clr.spmi -c $(_BuildConfig)
archParameter: -arch x64
+ displayName: Build SuperPMI
- template: /eng/pipelines/coreclr/templates/build-native-test-assets-step.yml
- template: /eng/pipelines/common/upload-artifact-step.yml
parameters:
@@ -127,6 +128,7 @@ extends:
buildArgs: -s clr.spmi -c $(_BuildConfig)
archParameter: -arch x64
container: linux_x64
+ displayName: Build SuperPMI
- template: /eng/pipelines/coreclr/templates/build-native-test-assets-step.yml
- template: /eng/pipelines/common/upload-artifact-step.yml
parameters:
diff --git a/eng/pipelines/coreclr/templates/build-and-run-perf-ios-scenarios.yml b/eng/pipelines/coreclr/templates/build-and-run-perf-ios-scenarios.yml
index 2d484c8e15e..f9513e556f8 100644
--- a/eng/pipelines/coreclr/templates/build-and-run-perf-ios-scenarios.yml
+++ b/eng/pipelines/coreclr/templates/build-and-run-perf-ios-scenarios.yml
@@ -1,5 +1,6 @@
parameters:
hybridGlobalization: True
+ perfBranch: 'main'
jobs:
# build mono iOS scenarios HybridGlobalization
@@ -61,13 +62,14 @@ jobs:
jobParameters:
testGroup: perf
runtimeType: iOSMono
- projectFile: ios_scenarios.proj
+ projectFile: $(Build.SourcesDirectory)/eng/testing/performance/ios_scenarios.proj
runKind: ios_scenarios
runJobTemplate: /eng/pipelines/coreclr/templates/run-scenarios-job.yml
logicalmachine: 'perfiphone12mini'
iOSLlvmBuild: False
iOSStripSymbols: False
hybridGlobalization: ${{ parameters.hybridGlobalization }}
+ perfBranch: ${{ parameters.perfBranch }}
- template: /eng/pipelines/common/platform-matrix.yml
parameters:
@@ -79,13 +81,14 @@ jobs:
jobParameters:
testGroup: perf
runtimeType: iOSMono
- projectFile: ios_scenarios.proj
+ projectFile: $(Build.SourcesDirectory)/eng/testing/performance/ios_scenarios.proj
runKind: ios_scenarios
runJobTemplate: /eng/pipelines/coreclr/templates/run-scenarios-job.yml
logicalmachine: 'perfiphone12mini'
iOSLlvmBuild: False
iOSStripSymbols: True
hybridGlobalization: ${{ parameters.hybridGlobalization }}
+ perfBranch: ${{ parameters.perfBranch }}
- template: /eng/pipelines/common/platform-matrix.yml
parameters:
@@ -97,13 +100,14 @@ jobs:
jobParameters:
testGroup: perf
runtimeType: iOSMono
- projectFile: ios_scenarios.proj
+ projectFile: $(Build.SourcesDirectory)/eng/testing/performance/ios_scenarios.proj
runKind: ios_scenarios
runJobTemplate: /eng/pipelines/coreclr/templates/run-scenarios-job.yml
logicalmachine: 'perfiphone12mini'
iOSLlvmBuild: True
iOSStripSymbols: False
hybridGlobalization: ${{ parameters.hybridGlobalization }}
+ perfBranch: ${{ parameters.perfBranch }}
- template: /eng/pipelines/common/platform-matrix.yml
parameters:
@@ -115,13 +119,14 @@ jobs:
jobParameters:
testGroup: perf
runtimeType: iOSMono
- projectFile: ios_scenarios.proj
+ projectFile: $(Build.SourcesDirectory)/eng/testing/performance/ios_scenarios.proj
runKind: ios_scenarios
runJobTemplate: /eng/pipelines/coreclr/templates/run-scenarios-job.yml
logicalmachine: 'perfiphone12mini'
iOSLlvmBuild: True
iOSStripSymbols: True
hybridGlobalization: ${{ parameters.hybridGlobalization }}
+ perfBranch: ${{ parameters.perfBranch }}
# run NativeAOT iOS scenarios HybridGlobalization
- template: /eng/pipelines/common/platform-matrix.yml
@@ -134,12 +139,13 @@ jobs:
jobParameters:
testGroup: perf
runtimeType: iOSNativeAOT
- projectFile: ios_scenarios.proj
+ projectFile: $(Build.SourcesDirectory)/eng/testing/performance/ios_scenarios.proj
runKind: ios_scenarios
runJobTemplate: /eng/pipelines/coreclr/templates/run-scenarios-job.yml
logicalmachine: 'perfiphone12mini'
iOSStripSymbols: False
hybridGlobalization: ${{ parameters.hybridGlobalization }}
+ perfBranch: ${{ parameters.perfBranch }}
- template: /eng/pipelines/common/platform-matrix.yml
parameters:
@@ -151,9 +157,10 @@ jobs:
jobParameters:
testGroup: perf
runtimeType: iOSNativeAOT
- projectFile: ios_scenarios.proj
+ projectFile: $(Build.SourcesDirectory)/eng/testing/performance/ios_scenarios.proj
runKind: ios_scenarios
runJobTemplate: /eng/pipelines/coreclr/templates/run-scenarios-job.yml
logicalmachine: 'perfiphone12mini'
iOSStripSymbols: True
hybridGlobalization: ${{ parameters.hybridGlobalization }}
+ perfBranch: ${{ parameters.perfBranch }}
diff --git a/eng/pipelines/coreclr/templates/build-perf-bdn-app.yml b/eng/pipelines/coreclr/templates/build-perf-bdn-app.yml
index 8b02b739faf..e50bf8963b5 100644
--- a/eng/pipelines/coreclr/templates/build-perf-bdn-app.yml
+++ b/eng/pipelines/coreclr/templates/build-perf-bdn-app.yml
@@ -87,7 +87,7 @@ steps:
curl -o NuGet.config 'https://raw.githubusercontent.com/dotnet/maui/${{parameters.framework}}/NuGet.config'
curl -o dotnet-install.sh 'https://dotnet.microsoft.com/download/dotnet/scripts/v1/dotnet-install.sh'
curl -Lo performance-version-details.xml 'https://raw.githubusercontent.com/dotnet/performance/${{parameters.perfBranch}}/eng/Version.Details.xml'
- version=$(sed -nr 's/[[:space:]]*-
- export RestoreAdditionalProjectSources=$HELIX_CORRELATION_PAYLOAD/built-nugets &&
- sudo apt-get -y remove nodejs &&
- sudo apt-get update &&
- sudo apt-get install -y ca-certificates curl gnupg &&
- sudo mkdir -p /etc/apt/keyrings &&
- sudo rm -f /etc/apt/keyrings/nodesource.gpg &&
- curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | sudo gpg --dearmor --batch -o /etc/apt/keyrings/nodesource.gpg &&
- export NODE_MAJOR=18 &&
- echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_$NODE_MAJOR.x nodistro main" | sudo tee /etc/apt/sources.list.d/nodesource.list &&
- sudo apt-get update &&
- sudo apt-get install nodejs -y &&
- test -n "$(V8Version)" &&
- npm install --prefix $HELIX_WORKITEM_PAYLOAD jsvu -g &&
- $HELIX_WORKITEM_PAYLOAD/bin/jsvu --os=linux64 v8@$(V8Version) &&
- export V8_ENGINE_PATH=~/.jsvu/bin/v8-$(V8Version) &&
- ${V8_ENGINE_PATH} -e 'console.log(`V8 version: ${this.version()}`)'
- - ${{ if ne(parameters.runtimeType, 'wasm') }}:
- - HelixPreCommandsWasmOnLinux: echo
- - HelixPreCommandStemWindows: 'set ORIGPYPATH=%PYTHONPATH%;py -m pip install -U pip;py -3 -m venv %HELIX_WORKITEM_PAYLOAD%\.venv;call %HELIX_WORKITEM_PAYLOAD%\.venv\Scripts\activate.bat;echo on;set PYTHONPATH=;python -m pip install -U pip;python -m pip install urllib3==1.26.15;python -m pip install azure.storage.blob==12.13.0;python -m pip install azure.storage.queue==12.4.0;python -m pip install azure.identity==1.16.1;set "PERFLAB_UPLOAD_TOKEN=$(HelixPerfUploadTokenValue)"'
- - HelixPreCommandStemLinux: >-
- export ORIGPYPATH=$PYTHONPATH
- export CRYPTOGRAPHY_ALLOW_OPENSSL_102=true;
- echo "** Installing prerequistes **";
- echo "** Waiting for dpkg to unlock (up to 2 minutes) **" &&
- timeout 2m bash -c 'while sudo fuser /var/lib/dpkg/lock-frontend >/dev/null 2>&1; do if [ -z "$printed" ]; then echo "Waiting for dpkg lock to be released... Lock is held by: $(ps -o cmd= -p $(sudo fuser /var/lib/dpkg/lock-frontend))"; printed=1; fi; echo "Waiting 5 seconds to check again"; sleep 5; done;' &&
- sudo apt-get remove -y lttng-modules-dkms &&
- sudo apt-get -y install python3-pip &&
- python3 -m pip install --user -U pip &&
- sudo apt-get -y install python3-venv &&
- python3 -m venv $HELIX_WORKITEM_PAYLOAD/.venv &&
- ls -l $HELIX_WORKITEM_PAYLOAD/.venv/bin/activate &&
- export PYTHONPATH= &&
- python3 -m pip install --user -U pip &&
- pip3 install urllib3==1.26.15 &&
- pip3 install --user azure.storage.blob==12.13.0 &&
- pip3 install --user azure.storage.queue==12.4.0 &&
- pip3 install --user azure.identity==1.16.1 &&
- sudo apt-get update &&
- sudo apt -y install curl dirmngr apt-transport-https lsb-release ca-certificates &&
- $(HelixPreCommandsWasmOnLinux) &&
- export PERFLAB_UPLOAD_TOKEN="$(HelixPerfUploadTokenValue)"
- || export PERF_PREREQS_INSTALL_FAILED=1;
- test "x$PERF_PREREQS_INSTALL_FAILED" = "x1" && echo "** Error: Failed to install prerequites **"
- - HelixPreCommandStemMusl: 'ulimit -n 4096;export ORIGPYPATH=$PYTHONPATH;sudo apk add icu-libs krb5-libs libgcc libintl libssl1.1 libstdc++ zlib cargo;sudo apk add libgdiplus --repository http://dl-cdn.alpinelinux.org/alpine/edge/testing; python3 -m venv $HELIX_WORKITEM_PAYLOAD/.venv;source $HELIX_WORKITEM_PAYLOAD/.venv/bin/activate;export PYTHONPATH=;python3 -m pip install -U pip;pip3 install urllib3==1.26.15;pip3 install azure.storage.blob==12.13.0;pip3 install azure.storage.queue==12.4.0;pip3 install azure.identity==1.16.1;export PERFLAB_UPLOAD_TOKEN="$(HelixPerfUploadTokenValue)"'
- - ExtraMSBuildLogsWindows: 'set MSBUILDDEBUGCOMM=1;set "MSBUILDDEBUGPATH=%HELIX_WORKITEM_UPLOAD_ROOT%"'
- - ExtraMSBuildLogsLinux: 'export MSBUILDDEBUGCOMM=1;export "MSBUILDDEBUGPATH=$HELIX_WORKITEM_UPLOAD_ROOT"'
- - HelixPreCommand: ''
- - HelixPostCommand: ''
- - Interpreter: ''
+ - name: internalParam
+ value: ''
- ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - ${{ if eq( parameters.osGroup, 'windows') }}:
- - HelixPreCommand: $(HelixPreCommandStemWindows);$(ExtraMSBuildLogsWindows)
- - HelixPostCommand: 'set PYTHONPATH=%ORIGPYPATH%'
- - IsInternal: -Internal
- - ${{ if ne(parameters.osGroup, 'windows') }}:
- - ${{ if eq(parameters.osSubGroup, '_musl') }}:
- - HelixPreCommand: $(HelixPreCommandStemMusl);$(ExtraMSBuildLogsLinux)
- - HelixPostCommand: 'export PYTHONPATH=$ORIGPYPATH'
- - IsInternal: --internal
- - ${{ if ne(parameters.osSubGroup, '_musl') }}:
- - HelixPreCommand: $(HelixPreCommandStemLinux);$(ExtraMSBuildLogsLinux)
- - HelixPostCommand: 'export PYTHONPATH=$ORIGPYPATH;${{ parameters.collectHelixLogsScript }}'
- - IsInternal: --internal
+ - name: internalParam
+ value: --internal
- group: DotNet-HelixApi-Access
- group: dotnet-benchview
- - ${{ if not(and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'))) }}:
- - ${{ if eq( parameters.osGroup, 'windows') }}:
- - HelixPreCommand: $(HelixPreCommandStemWindows);$(ExtraMSBuildLogsWindows)
- - HelixPostCommand: 'export PYTHONPATH=$ORIGPYPATH;${{ parameters.collectHelixLogsScript }}'
- - ${{ if ne(parameters.osGroup, 'windows') }}:
- - HelixPreCommand: $(HelixPreCommandStemLinux);$(ExtraMSBuildLogsLinux);
- - HelixPostCommand: 'export PYTHONPATH=$ORIGPYPATH;${{ parameters.collectHelixLogsScript }}'
-
- - ${{ if and(eq(parameters.codeGenType, 'Interpreter'), eq(parameters.runtimeType, 'mono')) }}:
- - ${{ if eq( parameters.osGroup, 'windows') }}:
- - ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - HelixPreCommand: '$(HelixPreCommandStemWindows);set MONO_ENV_OPTIONS="--interpreter";$(ExtraMSBuildLogsWindows)'
- - Interpreter: ' -MonoInterpreter'
- - ${{ if not(and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'))) }}:
- - HelixPreCommand: 'set MONO_ENV_OPTIONS="--interpreter";$(ExtraMSBuildLogsWindows)'
- - Interpreter: ' -MonoInterpreter'
- - ${{ if ne(parameters.osGroup, 'windows') }}:
- - ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - HelixPreCommand: '$(HelixPreCommandStemLinux);export MONO_ENV_OPTIONS="--interpreter";$(ExtraMSBuildLogsLinux)'
- - Interpreter: ' --monointerpreter'
- - ${{ if not(and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'))) }}:
- - HelixPreCommand: 'export MONO_ENV_OPTIONS="--interpreter";$(ExtraMSBuildLogsLinux)'
- - Interpreter: ' --monointerpreter'
-
workspace:
clean: all
pool:
@@ -172,28 +78,14 @@ jobs:
_Framework: ${{ framework }}
steps:
- ${{ parameters.steps }}
- - powershell: $(Build.SourcesDirectory)\eng\testing\performance\performance-setup.ps1 $(IsInternal)$(Interpreter) -Framework $(_Framework) -Kind ${{ parameters.runKind }} -LogicalMachine ${{ parameters.logicalMachine }} ${{ parameters.pgoRunType }} ${{ parameters.physicalPromotionRunType }} ${{ parameters.r2rRunType }} -ExperimentName '${{ parameters.experimentName }}' -UseLocalCommitTime ${{ parameters.extraSetupParameters }}
- displayName: Performance Setup (Windows)
- condition: and(succeeded(), eq(variables['Agent.Os'], 'Windows_NT'))
- continueOnError: ${{ parameters.continueOnError }}
- - script: $(Build.SourcesDirectory)/eng/testing/performance/performance-setup.sh $(IsInternal)$(Interpreter) --framework $(_Framework) --kind ${{ parameters.runKind }} --logicalmachine ${{ parameters.logicalMachine }} ${{ parameters.pgoRunType }} ${{ parameters.physicalPromotionRunType }} ${{ parameters.r2rRunType }} --experimentname '${{ parameters.experimentName }}' --uselocalcommittime ${{ parameters.extraSetupParameters }}
- displayName: Performance Setup (Unix)
- condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT'))
- continueOnError: ${{ parameters.continueOnError }}
- - script: $(Python) $(PerformanceDirectory)/scripts/ci_setup.py $(SetupArguments) ${{ parameters.additionalSetupParameters }}
- displayName: Run ci setup script
+ - script: $(Python) $(Agent.BuildDirectory)/performance/scripts/run_performance_job.py --framework $(_Framework) $(internalParam) ${{ parameters.extraSetupParameters }} ${{ parameters.additionalSetupParameters }}
+ displayName: Run performance job script
+ env:
+ HelixAccessToken: '$(HelixApiAccessToken)'
+ PerfCommandUploadToken: '$(PerfCommandUploadToken)'
+ PerfCommandUploadTokenLinux: '$(PerfCommandUploadTokenLinux)'
# Run perf testing in helix
- template: /eng/pipelines/coreclr/templates/perf-send-to-helix.yml
parameters:
- HelixSource: '$(HelixSourcePrefix)/$(Build.Repository.Name)/$(Build.SourceBranch)' # sources must start with pr/, official/, prodcon/, or agent/
- HelixType: 'test/performance/$(Kind)/$(_Framework)/$(Architecture)${{ parameters.helixTypeSuffix }}'
- HelixAccessToken: $(HelixApiAccessToken)
- HelixTargetQueues: $(Queue)
- HelixPreCommands: $(HelixPreCommand)
- HelixPostCommands: $(HelixPostCommand)
- Creator: $(Creator)
- WorkItemTimeout: 4:00 # 4 hours
- WorkItemDirectory: '$(WorkItemDirectory)' # WorkItemDirectory can not be empty, so we send it some docs to keep it happy
- CorrelationPayloadDirectory: '$(PayloadDirectory)' # it gets checked out to a folder with shorter path than WorkItemDirectory so we can avoid file name too long exceptions
ProjectFile: ${{ parameters.projectFile }}
osGroup: ${{ parameters.osGroup }}
diff --git a/eng/pipelines/coreclr/templates/run-scenarios-job.yml b/eng/pipelines/coreclr/templates/run-scenarios-job.yml
index e58dd3586e7..2bb05f14835 100644
--- a/eng/pipelines/coreclr/templates/run-scenarios-job.yml
+++ b/eng/pipelines/coreclr/templates/run-scenarios-job.yml
@@ -48,62 +48,15 @@ jobs:
- ${{ each variable in parameters.variables }}:
- ${{insert}}: ${{ variable }}
- - IsInternal: ''
- - HelixApiAccessToken: ''
- - SharedHelixPreCommands: ''
- - AdditionalHelixPreCommands: ''
- - AdditionalHelixPostCommands: ''
- # run machine-setup and set PYTHONPATH for both public and private jobs
- - ${{ if eq(parameters.osGroup, 'windows') }}:
- - SharedHelixPreCommands: 'call %HELIX_WORKITEM_PAYLOAD%\machine-setup.cmd;set PYTHONPATH=%HELIX_WORKITEM_PAYLOAD%\scripts%3B%HELIX_WORKITEM_PAYLOAD%'
- - ${{ if ne(parameters.osGroup, 'windows') }}:
- - SharedHelixPreCommands: 'chmod +x $HELIX_WORKITEM_PAYLOAD/machine-setup.sh;. $HELIX_WORKITEM_PAYLOAD/machine-setup.sh;export PYTHONPATH=$HELIX_WORKITEM_PAYLOAD/scripts:$HELIX_WORKITEM_PAYLOAD'
+ - name: internalParam
+ value: ''
- - ${{ if eq(parameters.osGroup, 'windows') }}:
- - HelixPreCommandWindows: 'set ORIGPYPATH=%PYTHONPATH%;py -3 -m venv %HELIX_WORKITEM_PAYLOAD%\.venv;call %HELIX_WORKITEM_PAYLOAD%\.venv\Scripts\activate.bat;echo on;set PYTHONPATH=;python -m pip install -U pip;python -m pip install azure.storage.blob==12.13.0;python -m pip install azure.storage.queue==12.4.0;python -m pip install urllib3==1.26.15;python -m pip install azure.identity==1.16.1;set "PERFLAB_UPLOAD_TOKEN=$(PerfCommandUploadToken)"'
- - HelixPostCommandsWindows: 'set PYTHONPATH=%ORIGPYPATH%'
- - ${{ if and(ne(parameters.osGroup, 'windows'), ne(parameters.osGroup, 'osx'), ne(parameters.osSubGroup, '_musl')) }}:
- - HelixPreCommandLinux: 'export ORIGPYPATH=$PYTHONPATH;export CRYPTOGRAPHY_ALLOW_OPENSSL_102=true;sudo apt-get -y install python3-venv;python3 -m venv $HELIX_WORKITEM_PAYLOAD/.venv;source $HELIX_WORKITEM_PAYLOAD/.venv/bin/activate;export PYTHONPATH=;python3 -m pip install -U pip;pip3 install azure.storage.blob==12.13.0;pip3 install azure.storage.queue==12.4.0;pip3 install azure.identity==1.16.1;pip3 install urllib3==1.26.15;export PERFLAB_UPLOAD_TOKEN="$(PerfCommandUploadTokenLinux)"'
- - HelixPostCommandsLinux: 'export PYTHONPATH=$ORIGPYPATH'
- - ${{ if and(ne(parameters.osGroup, 'windows'), ne(parameters.osGroup, 'osx'), eq(parameters.osSubGroup, '_musl')) }}:
- - HelixPreCommandMusl: 'ulimit -n 4096;export ORIGPYPATH=$PYTHONPATH;sudo apk add py3-virtualenv;python3 -m venv $HELIX_WORKITEM_PAYLOAD/.venv;source $HELIX_WORKITEM_PAYLOAD/.venv/bin/activate;export PYTHONPATH=;python3 -m pip install -U pip;pip3 install azure.storage.blob==12.13.0;pip3 install azure.storage.queue==12.4.0;pip3 install azure.identity==1.16.1;pip3 install urllib3==1.26.15;export PERFLAB_UPLOAD_TOKEN="$(PerfCommandUploadTokenLinux)"'
- - HelixPostCommandsMusl: 'export PYTHONPATH=$ORIGPYPATH'
- - ${{ if eq(parameters.osGroup, 'osx') }}:
- - HelixPreCommandOSX: 'export ORIGPYPATH=$PYTHONPATH;export CRYPTOGRAPHY_ALLOW_OPENSSL_102=true;python3 -m venv $HELIX_WORKITEM_PAYLOAD/.venv;source $HELIX_WORKITEM_PAYLOAD/.venv/bin/activate;export PYTHONPATH=;python3 -m pip install -U pip;pip3 install azure.storage.blob==12.13.0;pip3 install azure.storage.queue==12.4.0;pip3 install azure.identity==1.16.1;pip3 install urllib3==1.26.15;export PERFLAB_UPLOAD_TOKEN="$(PerfCommandUploadTokenLinux)"'
- - HelixPostCommandOSX: 'export PYTHONPATH=$ORIGPYPATH'
-
- # extra private job settings
- ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - ${{ if eq(parameters.osGroup, 'windows') }}:
- - AdditionalHelixPreCommands: $(HelixPreCommandWindows)
- - AdditionalHelixPostCommands: $(HelixPostCommandsWindows)
- - IsInternal: -Internal
- - ${{ if and(ne(parameters.osGroup, 'windows'), ne(parameters.osGroup, 'osx'), ne(parameters.osSubGroup, '_musl')) }}:
- - AdditionalHelixPreCommands: $(HelixPreCommandLinux)
- - AdditionalHelixPostCommands: $(HelixPostCommandsLinux)
- - IsInternal: --internal
- - ${{ if and(ne(parameters.osGroup, 'windows'), ne(parameters.osGroup, 'osx'), eq(parameters.osSubGroup, '_musl')) }}:
- - AdditionalHelixPreCommands: $(HelixPreCommandMusl)
- - AdditionalHelixPostCommands: $(HelixPostCommandsMusl)
- - IsInternal: --internal
- - ${{ if eq(parameters.osGroup, 'osx') }}:
- - AdditionalHelixPreCommands: $(HelixPreCommandOSX)
- - AdditionalHelixPostCommands: $(HelixPostCommandOSX)
- - IsInternal: --internal
+ - name: internalParam
+ value: --internal
- group: DotNet-HelixApi-Access
- group: dotnet-benchview
- - ${{ if not(and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'))) }}:
- - ${{ if eq(parameters.osGroup, 'windows') }}:
- - AdditionalHelixPreCommands: $(HelixPreCommandWindows)
- - AdditionalHelixPostCommands: $(HelixPostCommandsWindows)
- - ${{ if and(ne(parameters.osGroup, 'windows'), ne(parameters.osGroup, 'osx')) }}:
- - AdditionalHelixPreCommands: $(HelixPreCommandLinux)
- - AdditionalHelixPostCommands: $(HelixPostCommandsLinux)
- - ${{ if eq(parameters.osGroup, 'osx') }}:
- - AdditionalHelixPreCommands: $(HelixPreCommandOSX)
- - AdditionalHelixPostCommands: $(HelixPostCommandOSX)
-
- ExtraSetupArguments: ''
- name: ExtraSetupArguments
${{ if ne(parameters.runtimeType, 'wasm') }}:
@@ -125,23 +78,7 @@ jobs:
_Framework: ${{ framework }}
steps:
- ${{ parameters.steps }}
- # run performance-setup
- - powershell: $(Build.SourcesDirectory)\eng\testing\performance\performance-setup.ps1 $(IsInternal) -Framework $(_Framework) -Kind ${{ parameters.runKind }} -LogicalMachine ${{ parameters.logicalMachine }} -UseLocalCommitTime ${{ parameters.extraSetupParameters }} ${{ parameters.additionalSetupParameters }}
- displayName: Performance Setup (Windows)
- condition: and(succeeded(), eq(variables['Agent.Os'], 'Windows_NT'))
- continueOnError: ${{ parameters.continueOnError }}
- - script: $(Build.SourcesDirectory)/eng/testing/performance/performance-setup.sh $(IsInternal) --framework $(_Framework) --kind ${{ parameters.runKind }} --logicalmachine ${{ parameters.logicalMachine }} --uselocalcommittime ${{ parameters.extraSetupParameters }} ${{ parameters.additionalSetupParameters }}
- displayName: Performance Setup (Linux/MAC)
- condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT'))
- continueOnError: ${{ parameters.continueOnError }}
- # run ci-setup
- - script: $(Python) $(PerformanceDirectory)\scripts\ci_setup.py $(SetupArguments) $(ExtraSetupArguments) --output-file $(WorkItemDirectory)\machine-setup.cmd
- displayName: Run ci setup script (Windows)
- condition: and(succeeded(), eq(variables['Agent.Os'], 'Windows_NT'))
- - script: $(Python) $(PerformanceDirectory)/scripts/ci_setup.py $(SetupArguments) $(ExtraSetupArguments) --output-file $(WorkItemDirectory)/machine-setup.sh
- displayName: Run ci setup script (Linux/MAC)
- condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT'))
- # copy wasm packs if running on wasm
+ # copy wasm packs if running on wasm (TODO: move to run-performance-job.py)
- script: >-
mkdir -p $(librariesDownloadDir)/bin/wasm/data &&
cp -r $(librariesDownloadDir)/BrowserWasm/staging/dotnet-latest $(librariesDownloadDir)/bin/wasm &&
@@ -150,67 +87,16 @@ jobs:
find $(librariesDownloadDir)/bin/wasm -type f -exec chmod 664 {} \;
displayName: "Create wasm directory (Linux)"
condition: and(succeeded(), eq('${{ parameters.runtimeType }}', 'wasm'))
- # copy scenario support files
- - script: xcopy $(PerformanceDirectory)\scripts $(WorkItemDirectory)\scripts\/e && xcopy $(PerformanceDirectory)\src\scenarios\shared $(WorkItemDirectory)\shared\/e && xcopy $(PerformanceDirectory)\src\scenarios\staticdeps $(WorkItemDirectory)\staticdeps\/e
- displayName: Copy scenario support files (Windows)
- condition: and(succeeded(), eq(variables['Agent.Os'], 'Windows_NT'))
- - script: cp -r $(PerformanceDirectory)/scripts $(WorkItemDirectory)/scripts/ && cp -r $(PerformanceDirectory)/src/scenarios/shared $(WorkItemDirectory)/shared/ && cp -r $(PerformanceDirectory)/src/scenarios/staticdeps/ $(WorkItemDirectory)/staticdeps/
- displayName: Copy scenario support files (Linux/MAC)
- condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT'))
- # build Startup
- - script: $(PayloadDirectory)\dotnet\dotnet.exe publish -c Release -o $(WorkItemDirectory)\Startup -f net8.0 -r win-$(Architecture) --self-contained $(PerformanceDirectory)\src\tools\ScenarioMeasurement\Startup\Startup.csproj -p:DisableTransitiveFrameworkReferenceDownloads=true
- displayName: Build Startup tool (Windows)
- env:
- PERFLAB_TARGET_FRAMEWORKS: net8.0
- condition: and(succeeded(), eq(variables['Agent.Os'], 'Windows_NT'))
- - script: $(PayloadDirectory)/dotnet/dotnet publish -c Release -o $(WorkItemDirectory)/startup -f net8.0 -r linux-$(Architecture) --self-contained $(PerformanceDirectory)/src/tools/ScenarioMeasurement/Startup/Startup.csproj -p:DisableTransitiveFrameworkReferenceDownloads=true
- displayName: Build Startup tool (Linux)
- env:
- PERFLAB_TARGET_FRAMEWORKS: net8.0
- condition: and(succeeded(), eq(variables['Agent.Os'], 'Linux'))
- - script: $(PayloadDirectory)/dotnet/dotnet publish -c Release -o $(WorkItemDirectory)/startup -f net8.0 -r osx-$(Architecture) --self-contained $(PerformanceDirectory)/src/tools/ScenarioMeasurement/Startup/Startup.csproj -p:DisableTransitiveFrameworkReferenceDownloads=true
- displayName: Build Startup tool (MAC)
+ # run run-performance-job.py
+ - script: $(Python) $(Agent.BuildDirectory)/performance/scripts/run_performance_job.py --is-scenario --framework $(_Framework) $(internalParam) ${{ parameters.extraSetupParameters }} ${{ parameters.additionalSetupParameters }}
+ displayName: Run performance job script
env:
- PERFLAB_TARGET_FRAMEWORKS: net8.0
- condition: and(succeeded(), eq(variables['Agent.Os'], 'Darwin'))
- # build SizeOnDisk
- - script: $(PayloadDirectory)\dotnet\dotnet.exe publish -c Release -o $(WorkItemDirectory)\SOD -f net8.0 -r win-$(Architecture) --self-contained $(PerformanceDirectory)\src\tools\ScenarioMeasurement\SizeOnDisk\SizeOnDisk.csproj -p:DisableTransitiveFrameworkReferenceDownloads=true
- displayName: Build SizeOnDisk tool (Windows)
- env:
- PERFLAB_TARGET_FRAMEWORKS: net8.0
- condition: and(succeeded(), eq(variables['Agent.Os'], 'Windows_NT'))
- - script: $(PayloadDirectory)/dotnet/dotnet publish -c Release -o $(WorkItemDirectory)/SOD -f net8.0 -r linux-$(Architecture) --self-contained $(PerformanceDirectory)/src/tools/ScenarioMeasurement/SizeOnDisk/SizeOnDisk.csproj -p:DisableTransitiveFrameworkReferenceDownloads=true
- displayName: Build SizeOnDisk tool (Linux)
- env:
- PERFLAB_TARGET_FRAMEWORKS: net8.0
- condition: and(succeeded(), eq(variables['Agent.Os'], 'Linux'))
- - script: $(PayloadDirectory)/dotnet/dotnet publish -c Release -o $(WorkItemDirectory)/SOD -f net8.0 -r osx-$(Architecture) --self-contained $(PerformanceDirectory)/src/tools/ScenarioMeasurement/SizeOnDisk/SizeOnDisk.csproj -p:DisableTransitiveFrameworkReferenceDownloads=true
- displayName: Build SizeOnDisk tool (MAC)
- env:
- PERFLAB_TARGET_FRAMEWORKS: net8.0
- condition: and(succeeded(), eq(variables['Agent.Os'], 'Darwin'))
-
- # Zip the workitem directory (for xharness (mobile) based workitems)
- - ${{ if or(eq(parameters.runKind, 'android_scenarios'), eq(parameters.runKind, 'ios_scenarios')) }}:
- - task: ArchiveFiles@2
- inputs:
- rootFolderOrFile: '$(WorkItemDirectory)'
- includeRootFolder: false
- archiveFile: '$(WorkItemDirectory).zip'
- verbose: True
+ HelixAccessToken: '$(HelixApiAccessToken)'
+ PerfCommandUploadToken: '$(PerfCommandUploadToken)'
+ PerfCommandUploadTokenLinux: '$(PerfCommandUploadTokenLinux)'
# run perf testing in helix
- template: /eng/pipelines/coreclr/templates/perf-send-to-helix.yml
parameters:
- HelixSource: '$(HelixSourcePrefix)/$(Build.Repository.Name)/$(Build.SourceBranch)' # sources must start with pr/, official/, prodcon/, or agent/
- HelixType: 'test/performance/$(Kind)/$(_Framework)/$(Architecture)'
- HelixAccessToken: $(HelixApiAccessToken)
- HelixTargetQueues: $(Queue)
- HelixPreCommands: '$(AdditionalHelixPreCommands);$(SharedHelixPreCommands)' # $(HelixPreCommands) should follow $(AdditionalHelixPreCommands) because PYTHONPATH is cleared by the former
- HelixPostCommands: $(AdditionalHelixPostCommands)
- Creator: $(Creator)
- WorkItemTimeout: 4:00 # 4 hours
- WorkItemDirectory: '$(WorkItemDirectory)' # contains scenario tools, shared python scripts, dotnet tool
- CorrelationPayloadDirectory: '$(PayloadDirectory)' # contains performance repo and built product
ProjectFile: ${{ parameters.projectFile }}
osGroup: ${{ parameters.osGroup }}
diff --git a/eng/pipelines/coreclr/templates/run-superpmi-asmdiffs-checked-release-job.yml b/eng/pipelines/coreclr/templates/run-superpmi-asmdiffs-checked-release-job.yml
index 6284c6fd689..b5c7996da95 100644
--- a/eng/pipelines/coreclr/templates/run-superpmi-asmdiffs-checked-release-job.yml
+++ b/eng/pipelines/coreclr/templates/run-superpmi-asmdiffs-checked-release-job.yml
@@ -9,6 +9,7 @@ parameters:
archType: '' # required -- targeting CPU architecture
osGroup: '' # required -- operating system for the job
osSubgroup: '' # optional -- operating system subgroup
+ crossBuild: '' # optional -- 'true' if this is a cross-build
continueOnError: 'false' # optional -- determines whether to continue the build if the step errors
dependsOn: '' # optional -- dependencies of the job
timeoutInMinutes: 320 # optional -- timeout for the job
@@ -24,6 +25,7 @@ jobs:
archType: ${{ parameters.archType }}
osGroup: ${{ parameters.osGroup }}
osSubgroup: ${{ parameters.osSubgroup }}
+ crossBuild: ${{ parameters.crossBuild }}
liveLibrariesBuildConfig: ${{ parameters.liveLibrariesBuildConfig }}
enableTelemetry: ${{ parameters.enableTelemetry }}
enablePublishBuildArtifacts: true
diff --git a/eng/pipelines/coreclr/templates/run-superpmi-collect-job.yml b/eng/pipelines/coreclr/templates/run-superpmi-collect-job.yml
index 3ef32f30b2d..78ebc0a8089 100644
--- a/eng/pipelines/coreclr/templates/run-superpmi-collect-job.yml
+++ b/eng/pipelines/coreclr/templates/run-superpmi-collect-job.yml
@@ -9,6 +9,7 @@ parameters:
archType: '' # required -- targeting CPU architecture
osGroup: '' # required -- operating system for the job
osSubgroup: '' # optional -- operating system subgroup
+ crossBuild: '' # optional -- 'true' if this is a cross-build
continueOnError: 'false' # optional -- determines whether to continue the build if the step errors
dependsOn: '' # optional -- dependencies of the job
timeoutInMinutes: 320 # optional -- timeout for the job
@@ -25,6 +26,8 @@ jobs:
archType: ${{ parameters.archType }}
osGroup: ${{ parameters.osGroup }}
osSubgroup: ${{ parameters.osSubgroup }}
+ container: ${{ parameters.container }}
+ crossBuild: ${{ parameters.crossBuild }}
liveLibrariesBuildConfig: ${{ parameters.liveLibrariesBuildConfig }}
enableTelemetry: ${{ parameters.enableTelemetry }}
enablePublishBuildArtifacts: true
@@ -104,11 +107,16 @@ jobs:
- name: InputDirectory
value: '$(Build.SourcesDirectory)/artifacts/tests/coreclr/obj/${{ parameters.osGroup }}.${{ parameters.archType }}.$(buildConfigUpper)/Managed/nativeaot/SmokeTests'
+ - name: PublicQueuesCLIArg
+ ${{ if ne(variables['System.TeamProject'], 'internal') }}:
+ value: '-public_queues'
+ ${{ else }}:
+ value: ''
+
workspace:
clean: all
pool:
${{ parameters.pool }}
- container: ${{ parameters.container }}
steps:
- ${{ parameters.steps }}
@@ -116,7 +124,7 @@ jobs:
displayName: Enable python venv
condition: always()
- - script: $(PythonScript) $(Build.SourcesDirectory)/src/coreclr/scripts/superpmi_collect_setup.py -payload_directory $(PayloadLocation) -source_directory $(Build.SourcesDirectory) -core_root_directory $(Core_Root_Dir) -arch $(archType) -platform $(osGroup) -mch_file_tag $(MchFileTag) -input_directory $(InputDirectory) -collection_name $(CollectionName) -collection_type $(CollectionType) -max_size 25 # size in MB
+ - script: $(PythonScript) $(Build.SourcesDirectory)/src/coreclr/scripts/superpmi_collect_setup.py -payload_directory $(PayloadLocation) -source_directory $(Build.SourcesDirectory) -core_root_directory $(Core_Root_Dir) -arch $(archType) -platform $(osGroup) -mch_file_tag $(MchFileTag) -input_directory $(InputDirectory) -collection_name $(CollectionName) -collection_type $(CollectionType) $(PublicQueuesCLIArg) -max_size 25 # size in MB
displayName: ${{ format('SuperPMI setup ({0})', parameters.osGroup) }}
# Create required directories for merged mch collection and superpmi logs
@@ -139,7 +147,10 @@ jobs:
HelixAccessToken: $(HelixApiAccessToken)
HelixTargetQueues: $(Queue)
HelixPreCommands: $(HelixPreCommand)
- Creator: $(Creator)
+
+ ${{ if ne(variables['System.TeamProject'], 'internal') }}:
+ Creator: $(Build.DefinitionName)
+
WorkItemTimeout: 4:00 # 4 hours
WorkItemDirectory: '$(WorkItemDirectory)'
CorrelationPayloadDirectory: '$(CorrelationPayloadDirectory)'
@@ -170,25 +181,26 @@ jobs:
artifactName: 'SuperPMI_Collection_$(CollectionName)_$(CollectionType)_$(osGroup)$(osSubgroup)_$(archType)_$(buildConfig)'
displayName: ${{ format('Upload artifacts SuperPMI {0}-{1} collection', parameters.collectionName, parameters.collectionType) }}
- # Add authenticated pip feed
- - task: PipAuthenticate@1
- displayName: 'Pip Authenticate'
- inputs:
- artifactFeeds: public/dotnet-public-pypi
- onlyAddExtraIndex: false
-
- # Ensure the Python azure-storage-blob package is installed before doing the upload.
- - script: $(PipScript) install --upgrade pip && $(PipScript) install azure.storage.blob==12.5.0 --force-reinstall && $(PipScript) install azure.identity==1.16.1 --force-reinstall
- displayName: Upgrade Pip to latest and install azure-storage-blob and azure-identity Python packages
-
- - task: AzureCLI@2
- displayName: ${{ format('Upload SuperPMI {0}-{1} collection to Azure Storage', parameters.collectionName, parameters.collectionType) }}
- inputs:
- azureSubscription: 'superpmi-collect-rw'
- scriptType: 'pscore'
- scriptLocation: 'inlineScript'
- inlineScript: |
- $(PythonScript) $(Build.SourcesDirectory)/src/coreclr/scripts/superpmi.py upload -log_level DEBUG -arch $(archType) -build_type $(buildConfig) -mch_files $(MergedMchFileLocation)$(CollectionName).$(CollectionType).$(MchFileTag).mch -core_root $(Build.SourcesDirectory)/artifacts/bin/coreclr/$(osGroup).x64.$(buildConfigUpper)
+ - ${{ if eq(variables['System.TeamProject'], 'internal') }}:
+ # Add authenticated pip feed
+ - task: PipAuthenticate@1
+ displayName: 'Pip Authenticate'
+ inputs:
+ artifactFeeds: public/dotnet-public-pypi
+ onlyAddExtraIndex: false
+
+ # Ensure the Python azure-storage-blob package is installed before doing the upload.
+ - script: $(PipScript) install --upgrade pip && $(PipScript) install azure.storage.blob==12.5.0 --force-reinstall && $(PipScript) install azure.identity==1.16.1 --force-reinstall
+ displayName: Upgrade Pip to latest and install azure-storage-blob and azure-identity Python packages
+
+ - task: AzureCLI@2
+ displayName: ${{ format('Upload SuperPMI {0}-{1} collection to Azure Storage', parameters.collectionName, parameters.collectionType) }}
+ inputs:
+ azureSubscription: 'superpmi-collect-rw'
+ scriptType: 'pscore'
+ scriptLocation: 'inlineScript'
+ inlineScript: |
+ $(PythonScript) $(Build.SourcesDirectory)/src/coreclr/scripts/superpmi.py upload -log_level DEBUG -arch $(archType) -build_type $(buildConfig) -mch_files $(MergedMchFileLocation)$(CollectionName).$(CollectionType).$(MchFileTag).mch -core_root $(Build.SourcesDirectory)/artifacts/bin/coreclr/$(osGroup).x64.$(buildConfigUpper)
# Always upload the available logs for diagnostics
- task: CopyFiles@2
diff --git a/eng/pipelines/coreclr/templates/run-superpmi-diffs-job.yml b/eng/pipelines/coreclr/templates/run-superpmi-diffs-job.yml
index 2bca38b20b0..19a6f777804 100644
--- a/eng/pipelines/coreclr/templates/run-superpmi-diffs-job.yml
+++ b/eng/pipelines/coreclr/templates/run-superpmi-diffs-job.yml
@@ -9,6 +9,7 @@ parameters:
archType: '' # required -- targeting CPU architecture
osGroup: '' # required -- operating system for the job
osSubgroup: '' # optional -- operating system subgroup
+ crossBuild: '' # optional -- 'true' if this is a cross-build
continueOnError: 'false' # optional -- determines whether to continue the build if the step errors
dependsOn: '' # optional -- dependencies of the job
timeoutInMinutes: 180 # optional -- timeout for the job
@@ -27,6 +28,7 @@ jobs:
archType: ${{ parameters.archType }}
osGroup: ${{ parameters.osGroup }}
osSubgroup: ${{ parameters.osSubgroup }}
+ crossBuild: ${{ parameters.crossBuild }}
liveLibrariesBuildConfig: ${{ parameters.liveLibrariesBuildConfig }}
enableTelemetry: ${{ parameters.enableTelemetry }}
enablePublishBuildArtifacts: true
@@ -144,10 +146,10 @@ jobs:
condition: always()
- task: CopyFiles@2
- displayName: Copying superpmi.md of all partitions
+ displayName: Copying superpmi.json of all partitions
inputs:
sourceFolder: '$(HelixResultLocation)'
- contents: '**/superpmi_*.md'
+ contents: '**/superpmi_*.json'
targetFolder: '$(SpmiDiffsLocation)'
condition: always()
@@ -160,7 +162,7 @@ jobs:
targetFolder: '$(SpmiDiffsLocation)'
condition: always()
- - script: $(PythonScript) $(Build.SourcesDirectory)/src/coreclr/scripts/superpmi_diffs_summarize.py -diff_summary_dir $(SpmiDiffsLocation) -type $(diffType) -platform $(osGroup) -arch $(archType)
+ - script: $(PythonScript) $(Build.SourcesDirectory)/src/coreclr/scripts/superpmi_diffs_summarize.py -source_directory $(Build.SourcesDirectory) -diff_summary_dir $(SpmiDiffsLocation) -type $(diffType) -platform $(osGroup) -arch $(archType)
displayName: ${{ format('Summarize ({0} {1}{2} {3})', parameters.diffType, parameters.osGroup, parameters.osSubgroup, parameters.archType) }}
condition: always()
diff --git a/eng/pipelines/coreclr/templates/run-superpmi-replay-job.yml b/eng/pipelines/coreclr/templates/run-superpmi-replay-job.yml
index b8907cfc560..4047a8cb2f7 100644
--- a/eng/pipelines/coreclr/templates/run-superpmi-replay-job.yml
+++ b/eng/pipelines/coreclr/templates/run-superpmi-replay-job.yml
@@ -9,6 +9,7 @@ parameters:
archType: '' # required -- targeting CPU architecture
osGroup: '' # required -- operating system for the job
osSubgroup: '' # optional -- operating system subgroup
+ crossBuild: '' # optional -- 'true' if this is a cross-build
continueOnError: 'false' # optional -- determines whether to continue the build if the step errors
dependsOn: '' # optional -- dependencies of the job
timeoutInMinutes: 320 # optional -- timeout for the job
@@ -24,6 +25,7 @@ jobs:
archType: ${{ parameters.archType }}
osGroup: ${{ parameters.osGroup }}
osSubgroup: ${{ parameters.osSubgroup }}
+ crossBuild: ${{ parameters.crossBuild }}
liveLibrariesBuildConfig: ${{ parameters.liveLibrariesBuildConfig }}
enableTelemetry: ${{ parameters.enableTelemetry }}
enablePublishBuildArtifacts: true
diff --git a/eng/pipelines/coreclr/templates/superpmi-collect-job.yml b/eng/pipelines/coreclr/templates/superpmi-collect-job.yml
index 6af16c3db87..0f33e1d29b3 100644
--- a/eng/pipelines/coreclr/templates/superpmi-collect-job.yml
+++ b/eng/pipelines/coreclr/templates/superpmi-collect-job.yml
@@ -3,7 +3,9 @@ parameters:
archType: ''
osGroup: ''
osSubgroup: ''
+ container: ''
liveLibrariesBuildConfig: ''
+ crossBuild: ''
variables: {}
pool: ''
runJobTemplate: '/eng/pipelines/coreclr/templates/run-superpmi-collect-job.yml'
@@ -24,6 +26,8 @@ jobs:
archType: ${{ parameters.archType }}
osGroup: ${{ parameters.osGroup }}
osSubgroup: ${{ parameters.osSubgroup }}
+ container: ${{ parameters.container }}
+ crossBuild: ${{ parameters.crossBuild }}
liveLibrariesBuildConfig: ${{ parameters.liveLibrariesBuildConfig }}
collectionType: ${{ parameters.collectionType }}
collectionName: ${{ parameters.collectionName }}
@@ -33,7 +37,16 @@ jobs:
- ${{ if eq(parameters.collectionName, 'coreclr_tests') }}:
- 'coreclr_common_test_build_p1_AnyOS_AnyCPU_${{parameters.buildConfig }}'
- variables: ${{ parameters.variables }}
+ variables:
+
+ - name: crossArg
+ value: ''
+ - ${{ if eq(parameters.crossBuild, true) }}:
+ - name: crossArg
+ value: '-cross'
+
+ - ${{ each variable in parameters.variables }}:
+ - ${{insert}}: ${{ variable }}
steps:
# Extra steps that will be passed to the superpmi template and run before sending the job to helix (all of which is done in the template)
@@ -72,6 +85,6 @@ jobs:
displayName: 'generic managed test artifacts'
# Create Core_Root
- - script: $(Build.SourcesDirectory)/src/tests/build$(scriptExt) $(buildConfig) $(archType) generatelayoutonly $(librariesOverrideArg)
+ - script: $(Build.SourcesDirectory)/src/tests/build$(scriptExt) $(buildConfig) $(archType) $(crossArg) generatelayoutonly $(librariesOverrideArg) /p:UsePublishedCrossgen2=false
displayName: Create Core_Root
condition: succeeded()
diff --git a/eng/pipelines/coreclr/templates/superpmi-send-to-helix.yml b/eng/pipelines/coreclr/templates/superpmi-send-to-helix.yml
index 2a85d76c078..daed124df41 100644
--- a/eng/pipelines/coreclr/templates/superpmi-send-to-helix.yml
+++ b/eng/pipelines/coreclr/templates/superpmi-send-to-helix.yml
@@ -35,6 +35,7 @@ steps:
condition: ${{ parameters.condition }}
shouldContinueOnError: ${{ parameters.continueOnError }}
environment:
+ _Creator: ${{ parameters.Creator }}
TargetOS: ${{ parameters.osGroup }}
TargetArchitecture: ${{ parameters.archType }}
MchFileTag: $(MchFileTag)
@@ -56,5 +57,4 @@ steps:
DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
EnableXUnitReporter: ${{ parameters.EnableXUnitReporter }}
WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
- Creator: ${{ parameters.Creator }}
SYSTEM_ACCESSTOKEN: $(System.AccessToken)
diff --git a/eng/pipelines/extra-platforms/runtime-extra-platforms-ioslike.yml b/eng/pipelines/extra-platforms/runtime-extra-platforms-ioslike.yml
index 9794c736e2e..ab081b2c3c4 100644
--- a/eng/pipelines/extra-platforms/runtime-extra-platforms-ioslike.yml
+++ b/eng/pipelines/extra-platforms/runtime-extra-platforms-ioslike.yml
@@ -165,3 +165,4 @@ jobs:
creator: dotnet-bot
testBuildArgs: tree nativeaot/SmokeTests /p:BuildNativeAOTRuntimePack=true
testRunNamePrefixSuffix: NativeAOT_$(_BuildConfig)
+ buildAllTestsAsStandalone: true
diff --git a/eng/pipelines/extra-platforms/runtime-extra-platforms-ioslikesimulator.yml b/eng/pipelines/extra-platforms/runtime-extra-platforms-ioslikesimulator.yml
index 7ce0a0c3568..714acd5b9c1 100644
--- a/eng/pipelines/extra-platforms/runtime-extra-platforms-ioslikesimulator.yml
+++ b/eng/pipelines/extra-platforms/runtime-extra-platforms-ioslikesimulator.yml
@@ -139,3 +139,4 @@ jobs:
creator: dotnet-bot
testBuildArgs: tree nativeaot/SmokeTests /p:BuildNativeAOTRuntimePack=true
testRunNamePrefixSuffix: NativeAOT_$(_BuildConfig)
+ buildAllTestsAsStandalone: true
diff --git a/eng/pipelines/extra-platforms/runtime-extra-platforms-maccatalyst.yml b/eng/pipelines/extra-platforms/runtime-extra-platforms-maccatalyst.yml
index 8c9d16d9701..3eac90bcf95 100644
--- a/eng/pipelines/extra-platforms/runtime-extra-platforms-maccatalyst.yml
+++ b/eng/pipelines/extra-platforms/runtime-extra-platforms-maccatalyst.yml
@@ -112,6 +112,7 @@ jobs:
- template: /eng/pipelines/common/templates/runtimes/build-runtime-tests-and-send-to-helix.yml
parameters:
creator: dotnet-bot
+ buildAllTestsAsStandalone: true
testBuildArgs: tree nativeaot/SmokeTests /p:BuildNativeAOTRuntimePack=true
testRunNamePrefixSuffix: NativeAOT_$(_BuildConfig)
@@ -149,5 +150,6 @@ jobs:
- template: /eng/pipelines/common/templates/runtimes/build-runtime-tests-and-send-to-helix.yml
parameters:
creator: dotnet-bot
+ buildAllTestsAsStandalone: true
testBuildArgs: tree nativeaot/SmokeTests /p:BuildNativeAOTRuntimePack=true /p:DevTeamProvisioning=adhoc /p:EnableAppSandbox=true
testRunNamePrefixSuffix: NativeAOT_$(_BuildConfig)
diff --git a/eng/pipelines/global-build.yml b/eng/pipelines/global-build.yml
index 24962a6b65f..d86d5727e73 100644
--- a/eng/pipelines/global-build.yml
+++ b/eng/pipelines/global-build.yml
@@ -82,7 +82,7 @@ extends:
eq(variables['isRollingBuild'], true))
#
- # Build Libraries AllConfigurations. This exercises the code path where we build libraries for all
+ # Build Libraries AllConfigurations. This exercises the code path where we build libraries and tests for all
# configurations on a non Windows operating system.
#
- template: /eng/pipelines/common/platform-matrix.yml
@@ -93,7 +93,7 @@ extends:
- linux_x64_dev_innerloop
jobParameters:
nameSuffix: Libraries_AllConfigurations
- buildArgs: -subset libs -allconfigurations
+ buildArgs: -subset libs+libs.tests -allconfigurations
timeoutInMinutes: 120
condition:
or(
@@ -126,5 +126,5 @@ extends:
parameters:
platforms:
- name: Linux_x64
- targetRid: linux-x64
+ targetRID: linux-x64
container: SourceBuild_linux_x64
diff --git a/eng/pipelines/libraries/fuzzing/deploy-to-onefuzz.yml b/eng/pipelines/libraries/fuzzing/deploy-to-onefuzz.yml
index 276cc50f97d..49e7e517d9c 100644
--- a/eng/pipelines/libraries/fuzzing/deploy-to-onefuzz.yml
+++ b/eng/pipelines/libraries/fuzzing/deploy-to-onefuzz.yml
@@ -65,6 +65,14 @@ extends:
# displayName: Send to OneFuzz
# ONEFUZZ_TASK_WORKAROUND_START
+ - task: onefuzz-task@0
+ inputs:
+ onefuzzOSes: 'Windows'
+ env:
+ onefuzzDropDirectory: $(fuzzerProject)/deployment/AssemblyNameInfoFuzzer
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+ displayName: Send AssemblyNameInfoFuzzer to OneFuzz
+
- task: onefuzz-task@0
inputs:
onefuzzOSes: 'Windows'
@@ -73,6 +81,14 @@ extends:
SYSTEM_ACCESSTOKEN: $(System.AccessToken)
displayName: Send Base64Fuzzer to OneFuzz
+ - task: onefuzz-task@0
+ inputs:
+ onefuzzOSes: 'Windows'
+ env:
+ onefuzzDropDirectory: $(fuzzerProject)/deployment/Base64UrlFuzzer
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+ displayName: Send Base64UrlFuzzer to OneFuzz
+
- task: onefuzz-task@0
inputs:
onefuzzOSes: 'Windows'
@@ -89,6 +105,14 @@ extends:
SYSTEM_ACCESSTOKEN: $(System.AccessToken)
displayName: Send JsonDocumentFuzzer to OneFuzz
+ - task: onefuzz-task@0
+ inputs:
+ onefuzzOSes: 'Windows'
+ env:
+ onefuzzDropDirectory: $(fuzzerProject)/deployment/NrbfDecoderFuzzer
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+ displayName: Send NrbfDecoderFuzzer to OneFuzz
+
- task: onefuzz-task@0
inputs:
onefuzzOSes: 'Windows'
@@ -113,6 +137,14 @@ extends:
SYSTEM_ACCESSTOKEN: $(System.AccessToken)
displayName: Send TextEncodingFuzzer to OneFuzz
+ - task: onefuzz-task@0
+ inputs:
+ onefuzzOSes: 'Windows'
+ env:
+ onefuzzDropDirectory: $(fuzzerProject)/deployment/TypeNameFuzzer
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+ displayName: Send TypeNameFuzzer to OneFuzz
+
- task: onefuzz-task@0
inputs:
onefuzzOSes: 'Windows'
diff --git a/eng/pipelines/libraries/helix-queues-setup.yml b/eng/pipelines/libraries/helix-queues-setup.yml
index 3d4185b9338..5b660a70bf7 100644
--- a/eng/pipelines/libraries/helix-queues-setup.yml
+++ b/eng/pipelines/libraries/helix-queues-setup.yml
@@ -68,6 +68,7 @@ jobs:
- Ubuntu.2204.Amd64.Open
- (Debian.11.Amd64.Open)Ubuntu.2204.Amd64.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:debian-11-helix-amd64
- (Mariner.2.0.Amd64.Open)Ubuntu.2204.Amd64.open@mcr.microsoft.com/dotnet-buildtools/prereqs:cbl-mariner-2.0-helix-amd64
+ - (AzureLinux.3.0.Amd64.Open)Ubuntu.2204.Amd64.open@mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-helix-amd64
- (openSUSE.15.2.Amd64.Open)Ubuntu.2204.Amd64.open@mcr.microsoft.com/dotnet-buildtools/prereqs:opensuse-15.2-helix-amd64
- ${{ if or(ne(parameters.jobParameters.isExtraPlatformsBuild, true), eq(parameters.jobParameters.includeAllPlatforms, true)) }}:
- (Centos.8.Amd64.Open)Ubuntu.2204.Amd64.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8-helix
diff --git a/eng/pipelines/libraries/stress/http.yml b/eng/pipelines/libraries/stress/http.yml
index ee772d0da0e..3290eda51ae 100644
--- a/eng/pipelines/libraries/stress/http.yml
+++ b/eng/pipelines/libraries/stress/http.yml
@@ -12,6 +12,7 @@ schedules:
include:
- main
- release/8.0
+ - release/9.0
variables:
- template: ../variables.yml
@@ -99,7 +100,10 @@ extends:
displayName: Docker NanoServer
timeoutInMinutes: 150
variables:
- DUMPS_SHARE_MOUNT_ROOT: "C:/dumps-share"
+ # This will get assigned to the DUMPS_SHARE_MOUNT_ROOT environment
+ # variable in the stress test script. We need to keep the
+ # DUMPS_SHARE_MOUNT_ROOT variable empty during the build step.
+ DUMPS_SHARE_MOUNT_ROOT_PATH: "C:/dumps-share"
# The 1es-windows-2022-open image has an issue where the Chocolatey-installed V1 docker-compose takes precendence over the
# V2 docker-compose required by the stress tests, see: https://github.com/actions/runner-images/issues/7080
@@ -140,6 +144,7 @@ extends:
- powershell: |
cd '$(httpStressProject)'
+ $env:DUMPS_SHARE_MOUNT_ROOT = $env:DUMPS_SHARE_MOUNT_ROOT_PATH
$env:CLIENT_DUMPS_SHARE="$(Build.ArtifactStagingDirectory)/dumps/client/3.0"
$env:SERVER_DUMPS_SHARE="$(Build.ArtifactStagingDirectory)/dumps/server/3.0"
New-Item -Force $env:CLIENT_DUMPS_SHARE -ItemType Directory
@@ -152,6 +157,7 @@ extends:
- powershell: |
cd '$(httpStressProject)'
+ $env:DUMPS_SHARE_MOUNT_ROOT = $env:DUMPS_SHARE_MOUNT_ROOT_PATH
$env:CLIENT_DUMPS_SHARE="$(Build.ArtifactStagingDirectory)/dumps/client/2.0"
$env:SERVER_DUMPS_SHARE="$(Build.ArtifactStagingDirectory)/dumps/server/2.0"
New-Item -Force $env:CLIENT_DUMPS_SHARE -ItemType Directory
@@ -164,6 +170,7 @@ extends:
- powershell: |
cd '$(httpStressProject)'
+ $env:DUMPS_SHARE_MOUNT_ROOT = $env:DUMPS_SHARE_MOUNT_ROOT_PATH
$env:CLIENT_DUMPS_SHARE="$(Build.ArtifactStagingDirectory)/dumps/client/1.1"
$env:SERVER_DUMPS_SHARE="$(Build.ArtifactStagingDirectory)/dumps/server/1.1"
New-Item -Force $env:CLIENT_DUMPS_SHARE -ItemType Directory
diff --git a/eng/pipelines/libraries/stress/ssl.yml b/eng/pipelines/libraries/stress/ssl.yml
index 602e5bcf73d..230a2bef377 100644
--- a/eng/pipelines/libraries/stress/ssl.yml
+++ b/eng/pipelines/libraries/stress/ssl.yml
@@ -12,6 +12,7 @@ schedules:
include:
- main
- release/8.0
+ - release/9.0
variables:
- template: ../variables.yml
diff --git a/eng/pipelines/libraries/superpmi-postprocess-step.yml b/eng/pipelines/libraries/superpmi-postprocess-step.yml
index 343cb1ee58c..5ee73194e46 100644
--- a/eng/pipelines/libraries/superpmi-postprocess-step.yml
+++ b/eng/pipelines/libraries/superpmi-postprocess-step.yml
@@ -57,28 +57,29 @@ steps:
displayName: 'Upload artifacts SuperPMI ${{ parameters.SuperPmiCollectionName }}-${{ parameters.SuperPmiCollectionType }} collection'
condition: always()
- # Add authenticated pip feed
- - task: PipAuthenticate@1
- displayName: 'Pip Authenticate'
- inputs:
- artifactFeeds: public/dotnet-public-pypi
- onlyAddExtraIndex: false
- condition: always()
+ - ${{ if eq(variables['System.TeamProject'], 'internal') }}:
+ # Add authenticated pip feed
+ - task: PipAuthenticate@1
+ displayName: 'Pip Authenticate'
+ inputs:
+ artifactFeeds: public/dotnet-public-pypi
+ onlyAddExtraIndex: false
+ condition: always()
- # Ensure the Python azure-storage-blob package is installed before doing the upload.
- - script: ${{ parameters.PipScript }} install --upgrade pip && ${{ parameters.PipScript }} install azure.storage.blob==12.5.0 --force-reinstall && ${{ parameters.PipScript }} install azure.identity==1.16.1 --force-reinstall
- displayName: Upgrade Pip to latest and install azure-storage-blob and azure-identity Python packages
- condition: always()
+ # Ensure the Python azure-storage-blob package is installed before doing the upload.
+ - script: ${{ parameters.PipScript }} install --upgrade pip && ${{ parameters.PipScript }} install azure.storage.blob==12.5.0 --force-reinstall && ${{ parameters.PipScript }} install azure.identity==1.16.1 --force-reinstall
+ displayName: Upgrade Pip to latest and install azure-storage-blob and azure-identity Python packages
+ condition: always()
- - task: AzureCLI@2
- displayName: 'Upload SuperPMI ${{ parameters.SuperPmiCollectionName }}-${{ parameters.SuperPmiCollectionType }} collection to Azure Storage'
- inputs:
- azureSubscription: 'superpmi-collect-rw'
- scriptType: 'pscore'
- scriptLocation: 'inlineScript'
- inlineScript: |
- ${{ parameters.PythonScript }} $(Build.SourcesDirectory)/src/coreclr/scripts/superpmi.py upload -log_level DEBUG -arch ${{ parameters.archType }} -build_type ${{ parameters.buildConfig }} -mch_files ${{ parameters.MergedMchFileLocation }}${{ parameters.SuperPmiCollectionName }}.${{ parameters.SuperPmiCollectionType }}.${{ parameters.osGroup }}.${{ parameters.archType }}.${{ parameters.buildConfig }}.mch -core_root $(Build.SourcesDirectory)/artifacts/bin/coreclr/${{ parameters.osGroup }}.x64.${{ parameters.buildConfigUpper }}
- condition: always()
+ - task: AzureCLI@2
+ displayName: 'Upload SuperPMI ${{ parameters.SuperPmiCollectionName }}-${{ parameters.SuperPmiCollectionType }} collection to Azure Storage'
+ inputs:
+ azureSubscription: 'superpmi-collect-rw'
+ scriptType: 'pscore'
+ scriptLocation: 'inlineScript'
+ inlineScript: |
+ ${{ parameters.PythonScript }} $(Build.SourcesDirectory)/src/coreclr/scripts/superpmi.py upload -log_level DEBUG -arch ${{ parameters.archType }} -build_type ${{ parameters.buildConfig }} -mch_files ${{ parameters.MergedMchFileLocation }}${{ parameters.SuperPmiCollectionName }}.${{ parameters.SuperPmiCollectionType }}.${{ parameters.osGroup }}.${{ parameters.archType }}.${{ parameters.buildConfig }}.mch -core_root $(Build.SourcesDirectory)/artifacts/bin/coreclr/${{ parameters.osGroup }}.x64.${{ parameters.buildConfigUpper }}
+ condition: always()
- task: CopyFiles@2
displayName: Copying superpmi.log of all partitions
diff --git a/eng/pipelines/runtime-llvm.yml b/eng/pipelines/runtime-llvm.yml
index 5be2a5b063a..16c337b406d 100644
--- a/eng/pipelines/runtime-llvm.yml
+++ b/eng/pipelines/runtime-llvm.yml
@@ -144,7 +144,7 @@ extends:
nameSuffix: AllSubsets_Mono_LLVMFULLAOT_RuntimeTests
runtimeVariant: llvmfullaot
buildArgs: -s mono+libs+clr.hosts+clr.iltools -c $(_BuildConfig) /p:MonoEnableLLVM=true
- timeoutInMinutes: 360
+ timeoutInMinutes: 400
condition: >-
or(
eq(stageDependencies.EvaluatePaths.evaluate_paths.outputs['SetPathVars_libraries.containsChange'], true),
@@ -191,7 +191,7 @@ extends:
nameSuffix: AllSubsets_Mono_LLVMFULLAOT_RuntimeIntrinsicsTests
runtimeVariant: llvmfullaot
buildArgs: -s mono+libs+clr.hosts+clr.iltools -c $(_BuildConfig) /p:MonoEnableLLVM=true
- timeoutInMinutes: 360
+ timeoutInMinutes: 400
condition: >-
or(
eq(stageDependencies.EvaluatePaths.evaluate_paths.outputs['SetPathVars_libraries.containsChange'], true),
diff --git a/eng/pipelines/runtime-official.yml b/eng/pipelines/runtime-official.yml
index 4cac3562967..55021be6e29 100644
--- a/eng/pipelines/runtime-official.yml
+++ b/eng/pipelines/runtime-official.yml
@@ -41,12 +41,13 @@ extends:
# Localization build
#
- - template: /eng/common/templates-official/job/onelocbuild.yml
- parameters:
- MirrorRepo: runtime
- MirrorBranch: main
- LclSource: lclFilesfromPackage
- LclPackageId: 'LCL-JUNO-PROD-RUNTIME'
+ - ${{ if eq(variables['Build.SourceBranch'], 'refs/heads/release/9.0') }}:
+ - template: /eng/common/templates-official/job/onelocbuild.yml
+ parameters:
+ MirrorRepo: runtime
+ MirrorBranch: release/9.0
+ LclSource: lclFilesfromPackage
+ LclPackageId: 'LCL-JUNO-PROD-RUNTIME'
#
# Source Index Build
@@ -574,7 +575,7 @@ extends:
parameters:
platforms:
- name: Linux_x64
- targetRid: linux-x64
+ targetRID: linux-x64
container: SourceBuild_linux_x64
#
diff --git a/eng/pipelines/runtime-wasm-perf.yml b/eng/pipelines/runtime-wasm-perf.yml
index 39645a501ec..b07b6197733 100644
--- a/eng/pipelines/runtime-wasm-perf.yml
+++ b/eng/pipelines/runtime-wasm-perf.yml
@@ -2,6 +2,12 @@
# wasm jobs. This file is essentially so we can point the pipeline in azdo
# UI to this, and thus avoid any scheduled triggers
+parameters:
+- name: perfBranch
+ displayName: Performance Repo Branch
+ type: string
+ default: 'main'
+
trigger: none
pr:
@@ -32,9 +38,7 @@ extends:
runProfile: 'v8'
collectHelixLogsScript: ${{ variables._wasmCollectHelixLogsScript }}
onlySanityCheck: true
- #perfForkToUse: # dummy change
- #url: https://github.com/radical/performance
- #branch: fix-build
+ perfBranch: ${{ parameters.perfBranch }}
#downloadSpecificBuild:
#buildId: '1878694'
#pipeline: 'perf-wasm'
diff --git a/eng/pipelines/runtime.yml b/eng/pipelines/runtime.yml
index 7b4cfdb92cd..8426c37b206 100644
--- a/eng/pipelines/runtime.yml
+++ b/eng/pipelines/runtime.yml
@@ -870,6 +870,7 @@ extends:
- browser_wasm
- browser_wasm_win
- wasi_wasm
+ - wasi_wasm_win
nameSuffix: _Smoke_AOT
runAOT: true
shouldRunSmokeOnly: true
@@ -1857,11 +1858,11 @@ extends:
parameters:
platforms:
- name: CentOS8
- targetRid: centos.8-x64
+ targetRID: centos.8-x64
nonPortable: true
container: SourceBuild_centos_x64
- name: NonexistentRID
baseOS: linux
- targetRid: banana.24-x64
+ targetRID: banana.24-x64
nonPortable: true
container: SourceBuild_centos_x64
diff --git a/eng/pipelines/runtimelab/runtimelab-post-build-steps.yml b/eng/pipelines/runtimelab/runtimelab-post-build-steps.yml
index e84634cd006..5b1887c9aff 100644
--- a/eng/pipelines/runtimelab/runtimelab-post-build-steps.yml
+++ b/eng/pipelines/runtimelab/runtimelab-post-build-steps.yml
@@ -31,27 +31,27 @@ steps:
- ${{ if eq(parameters.platform, 'browser_wasm_win') }}:
- script: |
call $(Build.SourcesDirectory)\wasm-tools\emsdk\emsdk_env
- $(Build.SourcesDirectory)/src/tests/build$(scriptExt) nativeaot $(buildConfigUpper) ${{ parameters.archType }} tree nativeaot /p:LibrariesConfiguration=${{ parameters.librariesConfiguration }}
+ $(Build.SourcesDirectory)/src/tests/build$(scriptExt) nativeaot $(buildConfigUpper) ${{ parameters.archType }} $(crossArg) tree nativeaot /p:LibrariesConfiguration=${{ parameters.librariesConfiguration }}
displayName: Build WebAssembly tests
- ${{ elseif eq(parameters.platform, 'wasi_wasm_win') }}:
- script: |
- $(Build.SourcesDirectory)/src/tests/build$(scriptExt) nativeaot $(buildConfigUpper) ${{ parameters.archType }} wasi tree nativeaot /p:LibrariesConfiguration=${{ parameters.librariesConfiguration }}
+ $(Build.SourcesDirectory)/src/tests/build$(scriptExt) nativeaot $(buildConfigUpper) ${{ parameters.archType }} $(crossArg) wasi tree nativeaot /p:LibrariesConfiguration=${{ parameters.librariesConfiguration }}
displayName: Build WebAssembly tests
- ${{ elseif eq(parameters.platform, 'Browser_wasm_linux_x64_naot_llvm') }}:
- script: |
source $(Build.SourcesDirectory)/wasm-tools/emsdk/emsdk_env.sh
- $(Build.SourcesDirectory)/src/tests/build$(scriptExt) nativeaot $(buildConfigUpper) -browser tree nativeaot /p:LibrariesConfiguration=${{ parameters.librariesConfiguration }}
+ $(Build.SourcesDirectory)/src/tests/build$(scriptExt) nativeaot $(buildConfigUpper) $(crossArg) -browser tree nativeaot /p:LibrariesConfiguration=${{ parameters.librariesConfiguration }}
displayName: Build WebAssembly tests
- ${{ elseif eq(parameters.platform, 'wasi_wasm_linux_x64_naot_llvm') }}:
- script: |
- $(Build.SourcesDirectory)/src/tests/build$(scriptExt) nativeaot $(buildConfigUpper) -wasi tree nativeaot /p:LibrariesConfiguration=${{ parameters.librariesConfiguration }}
+ $(Build.SourcesDirectory)/src/tests/build$(scriptExt) nativeaot $(buildConfigUpper) $(crossArg) -wasi tree nativeaot /p:LibrariesConfiguration=${{ parameters.librariesConfiguration }}
displayName: Build WebAssembly tests
- ${{ elseif eq(parameters.osGroup, 'windows') }}:
- - script: $(Build.SourcesDirectory)/src/tests/build$(scriptExt) nativeaot $(buildConfigUpper) ${{ parameters.archType }} ${{ parameters.testFilter }} /p:NativeAotMultimodule=true /p:LibrariesConfiguration=${{ parameters.librariesConfiguration }}
+ - script: $(Build.SourcesDirectory)/src/tests/build$(scriptExt) nativeaot $(buildConfigUpper) ${{ parameters.archType }} $(crossArg) ${{ parameters.testFilter }} /p:NativeAotMultimodule=true /p:LibrariesConfiguration=${{ parameters.librariesConfiguration }}
displayName: Build tests
- ${{ else }}:
- - script: $(Build.SourcesDirectory)/src/tests/build$(scriptExt) nativeaot $(buildConfigUpper) ${{ parameters.archType }} 'tree nativeaot' /p:LibrariesConfiguration=${{ parameters.librariesConfiguration }}
+ - script: $(Build.SourcesDirectory)/src/tests/build$(scriptExt) nativeaot $(buildConfigUpper) ${{ parameters.archType }} $(crossArg) 'tree nativeaot' /p:LibrariesConfiguration=${{ parameters.librariesConfiguration }}
displayName: Build tests
- ${{ if in(parameters.platform, 'browser_wasm_win', 'wasi_wasm_win') }}:
diff --git a/eng/references.targets b/eng/references.targets
index d54606f17ab..2b0035d3054 100644
--- a/eng/references.targets
+++ b/eng/references.targets
@@ -39,7 +39,7 @@
+ Condition="$(NetCoreAppLibrary.Contains('%(Filename);')) and '%(ProjectReferenceWithConfiguration.Private)' == ''" />
diff --git a/eng/resolveContract.targets b/eng/resolveContract.targets
index fb93fcd09e9..9e5189a7572 100644
--- a/eng/resolveContract.targets
+++ b/eng/resolveContract.targets
@@ -39,8 +39,8 @@
+ DependsOnTargets="ResolveProjectReferences;GetTargetPathWithTargetPlatformMoniker"
+ BeforeTargets="GetTargetPath">
diff --git a/eng/targetingpacks.targets b/eng/targetingpacks.targets
index 837ec7601fd..c61a849d87a 100644
--- a/eng/targetingpacks.targets
+++ b/eng/targetingpacks.targets
@@ -109,19 +109,6 @@
-
-
-
-
-
-
-
-
0 ]]; then
echo "Total dumps found in $dump_folder: $total_dumps"
- xunitlogchecker_file_name="$HELIX_CORRELATION_PAYLOAD/XUnitLogChecker.dll"
- dotnet_file_name="$RUNTIME_PATH/dotnet"
-
- if [[ ! -f $dotnet_file_name ]]; then
- echo "'$dotnet_file_name' was not found. Unable to run XUnitLogChecker."
- xunitlogchecker_exit_code=1
- elif [[ ! -f $xunitlogchecker_file_name ]]; then
- echo "'$xunitlogchecker_file_name' was not found. Unable to print dump file contents."
+ xunitlogchecker_file_name="$HELIX_CORRELATION_PAYLOAD/XUnitLogChecker"
+
+ if [[ ! -f $xunitlogchecker_file_name ]]; then
+ echo "XUnitLogChecker does not exist in the expected location: $xunitlogchecker_file_name"
xunitlogchecker_exit_code=2
elif [[ ! -d $dump_folder ]]; then
echo "The dump directory '$dump_folder' does not exist."
else
echo "Executing XUnitLogChecker in $dump_folder..."
- cmd="$dotnet_file_name --roll-forward Major $xunitlogchecker_file_name --dumps-path $dump_folder"
+ cmd="$xunitlogchecker_file_name --dumps-path $dump_folder"
echo "$cmd"
$cmd
xunitlogchecker_exit_code=$?
@@ -207,7 +203,7 @@ if [[ $system_name == "Linux" && $test_exitcode -ne 0 ]]; then
if [[ -e /proc/sys/kernel/core_uses_pid && "1" == $(cat /proc/sys/kernel/core_uses_pid) ]]; then
core_name_uses_pid=1
fi
-
+
# The osx dumps are too large to egress the machine
echo Looking around for any Linux dumps...
@@ -240,8 +236,9 @@ elif [[ -z "$__IsXUnitLogCheckerSupported" ]]; then
elif [[ "$__IsXUnitLogCheckerSupported" != "1" ]]; then
echo "XUnitLogChecker not supported for this test case. Skipping."
else
+ echo "XUnitLogChecker status: $__IsXUnitLogCheckerSupported"
echo ----- start =============== XUnitLogChecker Output =====================================================
-
+
invoke_xunitlogchecker "$HELIX_DUMP_FOLDER"
if [[ $xunitlogchecker_exit_code -ne 0 ]]; then
diff --git a/eng/testing/performance/android_scenarios.proj b/eng/testing/performance/android_scenarios.proj
index 4d0aad300cd..fb18ea62bbb 100644
--- a/eng/testing/performance/android_scenarios.proj
+++ b/eng/testing/performance/android_scenarios.proj
@@ -16,40 +16,40 @@
- %HELIX_CORRELATION_PAYLOAD%\performance\src\scenarios\
+ %HELIX_WORKITEM_ROOT%\performance\src\scenarios\
- $HELIX_CORRELATION_PAYLOAD/performance/src/scenarios/
+ $HELIX_WORKITEM_ROOT/performance/src/scenarios/$(WorkItemDirectory)
- cd $(ScenarioDirectory)helloandroid;copy %HELIX_CORRELATION_PAYLOAD%\HelloAndroid.apk .;$(Python) pre.py --apk-name HelloAndroid.apk
+ cd $(ScenarioDirectory)helloandroid;copy %HELIX_WORKITEM_ROOT%\HelloAndroid.apk .;$(Python) pre.py --apk-name HelloAndroid.apk$(Python) test.py sod --scenario-name "%(Identity)"$(Python) post.py$(WorkItemDirectory)
- cd $(ScenarioDirectory)helloandroid;copy %HELIX_CORRELATION_PAYLOAD%\HelloAndroid.apk .;$(Python) pre.py --unzip --apk-name HelloAndroid.apk
+ cd $(ScenarioDirectory)helloandroid;copy %HELIX_WORKITEM_ROOT%\HelloAndroid.apk .;$(Python) pre.py --unzip --apk-name HelloAndroid.apk$(Python) test.py sod --scenario-name "%(Identity)"$(Python) post.py$(WorkItemDirectory)
- cd $(ScenarioDirectory)bdnandroid;copy %HELIX_CORRELATION_PAYLOAD%\MonoBenchmarksDroid.apk .;$(Python) pre.py --apk-name MonoBenchmarksDroid.apk
+ cd $(ScenarioDirectory)bdnandroid;copy %HELIX_WORKITEM_ROOT%\MonoBenchmarksDroid.apk .;$(Python) pre.py --apk-name MonoBenchmarksDroid.apk$(Python) test.py sod --scenario-name "%(Identity)"$(Python) post.py$(WorkItemDirectory)
- cd $(ScenarioDirectory)bdnandroid;copy %HELIX_CORRELATION_PAYLOAD%\MonoBenchmarksDroid.apk .;$(Python) pre.py --unzip --apk-name MonoBenchmarksDroid.apk
+ cd $(ScenarioDirectory)bdnandroid;copy %HELIX_WORKITEM_ROOT%\MonoBenchmarksDroid.apk .;$(Python) pre.py --unzip --apk-name MonoBenchmarksDroid.apk$(Python) test.py sod --scenario-name "%(Identity)"$(Python) post.py$(WorkItemDirectory)
- echo on;set XHARNESSPATH=$(XharnessPath);cd $(ScenarioDirectory)bdnandroid;copy %HELIX_CORRELATION_PAYLOAD%\MonoBenchmarksDroid.apk .;$(Python) pre.py --restart-device --apk-name MonoBenchmarksDroid.apk
+ echo on;set XHARNESSPATH=$(XharnessPath);cd $(ScenarioDirectory)bdnandroid;copy %HELIX_WORKITEM_ROOT%\MonoBenchmarksDroid.apk .;$(Python) pre.py --restart-device --apk-name MonoBenchmarksDroid.apk$(Python) test.py androidinstrumentation --package-path .\pub\MonoBenchmarksDroid.apk --package-name com.microsoft.maui.benchmarks --instrumentation-name com.microsoft.maui.MainInstrumentation --scenario-name "%(Identity)"$(Python) post.py00:30:00
diff --git a/eng/testing/performance/blazor_perf.proj b/eng/testing/performance/blazor_perf.proj
index bacb52650c1..45ad650dc1d 100644
--- a/eng/testing/performance/blazor_perf.proj
+++ b/eng/testing/performance/blazor_perf.proj
@@ -20,7 +20,7 @@
- %HELIX_CORRELATION_PAYLOAD%\performance\src\scenarios\
+ %HELIX_WORKITEM_ROOT%\performance\src\scenarios\$(ScenarioDirectory)blazorminapp\$(ScenarioDirectory)blazor\$(ScenarioDirectory)blazorpizza\
@@ -32,7 +32,7 @@
pub\wwwroot
- $HELIX_CORRELATION_PAYLOAD/performance/src/scenarios/
+ $HELIX_WORKITEM_ROOT/performance/src/scenarios/$(ScenarioDirectory)blazorminapp/$(ScenarioDirectory)blazor/$(ScenarioDirectory)blazorpizza/
diff --git a/eng/testing/performance/crossgen_perf.proj b/eng/testing/performance/crossgen_perf.proj
index 252ec7eaedb..4b7d163eaa2 100644
--- a/eng/testing/performance/crossgen_perf.proj
+++ b/eng/testing/performance/crossgen_perf.proj
@@ -13,7 +13,7 @@
python$(HelixPreCommands)%HELIX_CORRELATION_PAYLOAD%\Core_Root
- %HELIX_CORRELATION_PAYLOAD%\performance\src\scenarios\
+ %HELIX_WORKITEM_ROOT%\performance\src\scenarios\$(ScenarioDirectory)crossgen\$(ScenarioDirectory)crossgen2\
@@ -21,7 +21,7 @@
python3$(HelixPreCommands);chmod +x $HELIX_WORKITEM_PAYLOAD/startup/Startup;chmod +x $HELIX_WORKITEM_PAYLOAD/startup/perfcollect;sudo apt update;chmod +x $HELIX_WORKITEM_PAYLOAD/SOD/SizeOnDisk$HELIX_CORRELATION_PAYLOAD/Core_Root
- $HELIX_CORRELATION_PAYLOAD/performance/src/scenarios/
+ $HELIX_WORKITEM_ROOT/performance/src/scenarios/$(ScenarioDirectory)crossgen/$(ScenarioDirectory)crossgen2/
diff --git a/eng/testing/performance/ios_scenarios.proj b/eng/testing/performance/ios_scenarios.proj
index 1e4c59acd2b..1bc2a4ef5f9 100644
--- a/eng/testing/performance/ios_scenarios.proj
+++ b/eng/testing/performance/ios_scenarios.proj
@@ -25,11 +25,11 @@
- %HELIX_CORRELATION_PAYLOAD%\performance\src\scenarios\
+ %HELIX_WORKITEM_ROOT%\performance\src\scenarios\
- $HELIX_CORRELATION_PAYLOAD/performance/src/scenarios/
+ $HELIX_WORKITEM_ROOT/performance/src/scenarios/
diff --git a/eng/testing/performance/microbenchmarks.proj b/eng/testing/performance/microbenchmarks.proj
deleted file mode 100644
index 8453f469cd0..00000000000
--- a/eng/testing/performance/microbenchmarks.proj
+++ /dev/null
@@ -1,170 +0,0 @@
-
-
-
- %HELIX_WORKITEM_ROOT%\performance
- $(HelixPreCommands) && robocopy /np /nfl /e %HELIX_CORRELATION_PAYLOAD%\performance $(PerformanceDirectory) /XD %HELIX_CORRELATION_PAYLOAD%\performance\.git
- $(PerformanceDirectory)\scripts\benchmarks_ci.py --csproj $(PerformanceDirectory)\$(TargetCsproj)
- --dotnet-versions %DOTNET_VERSION% --cli-source-info args --cli-branch %PERFLAB_BRANCH% --cli-commit-sha %PERFLAB_HASH% --cli-repository https://github.com/%PERFLAB_REPO% --cli-source-timestamp %PERFLAB_BUILDTIMESTAMP%
- python
- %HELIX_CORRELATION_PAYLOAD%\Core_Root\CoreRun.exe
- %HELIX_CORRELATION_PAYLOAD%\Baseline_Core_Root\CoreRun.exe
- $(HelixPreCommands);call $(PerformanceDirectory)\tools\machine-setup.cmd;set PYTHONPATH=%HELIX_WORKITEM_PAYLOAD%\scripts%3B%HELIX_WORKITEM_PAYLOAD%
- %HELIX_WORKITEM_ROOT%\artifacts\BenchmarkDotNet.Artifacts
- %HELIX_WORKITEM_ROOT%\artifacts\BenchmarkDotNet.Artifacts_Baseline
- $(PerformanceDirectory)\src\tools\ResultsComparer\ResultsComparer.csproj
- $(PerformanceDirectory)\tools\dotnet\$(Architecture)\dotnet.exe
- %25%25
- %HELIX_WORKITEM_ROOT%\testResults.xml
-
-
-
- $HELIX_CORRELATION_PAYLOAD
- $HELIX_WORKITEM_ROOT/performance
- $(HelixPreCommands);cp -R $(BaseDirectory)/performance $(PerformanceDirectory)
-
-
-
- $HELIX_WORKITEM_PAYLOAD
- $(BaseDirectory)
-
-
-
- $(PerformanceDirectory)/scripts/benchmarks_ci.py --csproj $(PerformanceDirectory)/$(TargetCsproj)
- --dotnet-versions $DOTNET_VERSION --cli-source-info args --cli-branch $PERFLAB_BRANCH --cli-commit-sha $PERFLAB_HASH --cli-repository https://github.com/$PERFLAB_REPO --cli-source-timestamp $PERFLAB_BUILDTIMESTAMP
- python3
- $(BaseDirectory)/Core_Root/corerun
- $(BaseDirectory)/Baseline_Core_Root/corerun
- $(HelixPreCommands);chmod +x $(PerformanceDirectory)/tools/machine-setup.sh;. $(PerformanceDirectory)/tools/machine-setup.sh
- $HELIX_WORKITEM_ROOT/artifacts/BenchmarkDotNet.Artifacts
- $HELIX_WORKITEM_ROOT/artifacts/BenchmarkDotNet.Artifacts_Baseline
- $(PerformanceDirectory)/src/tools/ResultsComparer/ResultsComparer.csproj
- $(PerformanceDirectory)/tools/dotnet/$(Architecture)/dotnet
- %25
- $HELIX_WORKITEM_ROOT/testResults.xml
-
-
-
- $(CliArguments) --run-isolated --wasm --dotnet-path %24HELIX_CORRELATION_PAYLOAD/dotnet/
-
-
-
- --corerun %HELIX_CORRELATION_PAYLOAD%\dotnet-mono\shared\Microsoft.NETCore.App\$(ProductVersion)\corerun.exe
-
-
- --corerun $(BaseDirectory)/dotnet-mono/shared/Microsoft.NETCore.App/$(ProductVersion)/corerun
-
-
-
- $(HelixPreCommands);%HELIX_CORRELATION_PAYLOAD%\monoaot\mono-aot-cross --llvm --version
-
-
- $(HelixPreCommands);$HELIX_CORRELATION_PAYLOAD/monoaot/mono-aot-cross --llvm --version
-
-
-
- --corerun $(CoreRun)
-
-
-
- --corerun $(BaselineCoreRun)
-
-
-
- $(Python) $(WorkItemCommand) --incremental no --architecture $(Architecture) -f $(PERFLAB_Framework) $(PerfLabArguments)
-
-
-
- $(WorkItemCommand) $(CliArguments)
-
-
-
- 6:00
- 1:30
-
- $(ExtraBenchmarkDotNetArguments) --filter System.Tests.Perf_*
-
-
-
-
- %(Identity)
-
-
-
-
- 30
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- false
-
-
-
-
-
- $(WorkItemDirectory)
- $(WorkItemCommand) --bdn-artifacts $(BaselineArtifactsDirectory) --bdn-arguments="--anyCategories $(BDNCategories) $(ExtraBenchmarkDotNetArguments) $(BaselineCoreRunArgument) --partition-count $(PartitionCount) --partition-index %(HelixWorkItem.Index)"
-
- if [ "x$PERF_PREREQS_INSTALL_FAILED" = "x1" ]; then
- echo "\n\n** Error: Failed to install prerequisites **\n\n"; (exit 1);
- else
- $(WorkItemCommand) --bdn-artifacts $(ArtifactsDirectory) --bdn-arguments="--anyCategories $(BDNCategories) $(ExtraBenchmarkDotNetArguments) $(CoreRunArgument) --partition-count $(PartitionCount) --partition-index %(HelixWorkItem.Index)";
- fi
- $(WorkItemCommand) --bdn-artifacts $(ArtifactsDirectory) --bdn-arguments="--anyCategories $(BDNCategories) $(ExtraBenchmarkDotNetArguments) $(CoreRunArgument) --partition-count $(PartitionCount) --partition-index %(HelixWorkItem.Index)"
- $(DotnetExe) run -f $(PERFLAB_Framework) -p $(ResultsComparer) --base $(BaselineArtifactsDirectory) --diff $(ArtifactsDirectory) --threshold 2$(Percent) --xml $(XMLResults);$(FinalCommand)
- $(WorkItemTimeout)
-
-
-
-
-
- $(WorkItemDirectory)
- $(WorkItemCommand) --bdn-artifacts $(BaselineArtifactsDirectory) --bdn-arguments="--anyCategories $(BDNCategories) $(ExtraBenchmarkDotNetArguments) $(BaselineCoreRunArgument)"
-
- if [ "x$PERF_PREREQS_INSTALL_FAILED" = "x1" ]; then
- echo "\n\n** Error: Failed to install prerequisites **\n\n"; (exit 1);
- else
- $(WorkItemCommand) --bdn-artifacts $(ArtifactsDirectory) --bdn-arguments="--anyCategories $(BDNCategories) $(ExtraBenchmarkDotNetArguments) $(CoreRunArgument)";
- fi
- $(WorkItemCommand) --bdn-artifacts $(ArtifactsDirectory) --bdn-arguments="--anyCategories $(BDNCategories) $(ExtraBenchmarkDotNetArguments) $(CoreRunArgument)"
- $(DotnetExe) run -f $(PERFLAB_Framework) -p $(ResultsComparer) --base $(BaselineArtifactsDirectory) --diff $(ArtifactsDirectory) --threshold 2$(Percent) --xml $(XMLResults)
- 4:00
-
-
-
diff --git a/eng/testing/performance/performance-setup.ps1 b/eng/testing/performance/performance-setup.ps1
deleted file mode 100644
index 8e9ff873646..00000000000
--- a/eng/testing/performance/performance-setup.ps1
+++ /dev/null
@@ -1,228 +0,0 @@
-Param(
- [string] $SourceDirectory=$env:BUILD_SOURCESDIRECTORY,
- [string] $CoreRootDirectory,
- [string] $BaselineCoreRootDirectory,
- [string] $Architecture="x64",
- [string] $Framework,
- [string] $CompilationMode="Tiered",
- [string] $Repository=$env:BUILD_REPOSITORY_NAME,
- [string] $Branch=$env:BUILD_SOURCEBRANCH,
- [string] $CommitSha=$env:BUILD_SOURCEVERSION,
- [string] $BuildNumber=$env:BUILD_BUILDNUMBER,
- [string] $RunCategories="Libraries Runtime",
- [string] $Csproj="src\benchmarks\micro\MicroBenchmarks.csproj",
- [string] $Kind="micro",
- [switch] $LLVM,
- [switch] $MonoInterpreter,
- [switch] $MonoAOT,
- [switch] $Internal,
- [switch] $Compare,
- [string] $MonoDotnet="",
- [string] $Configurations="CompilationMode=$CompilationMode RunKind=$Kind",
- [string] $LogicalMachine="",
- [switch] $AndroidMono,
- [switch] $iOSMono,
- [switch] $iOSNativeAOT,
- [switch] $NoDynamicPGO,
- [switch] $PhysicalPromotion,
- [switch] $NoR2R,
- [string] $ExperimentName,
- [switch] $iOSLlvmBuild,
- [switch] $iOSStripSymbols,
- [switch] $HybridGlobalization,
- [string] $MauiVersion,
- [switch] $UseLocalCommitTime
-)
-
-$RunFromPerformanceRepo = ($Repository -eq "dotnet/performance") -or ($Repository -eq "dotnet-performance")
-$UseCoreRun = ($CoreRootDirectory -ne [string]::Empty)
-$UseBaselineCoreRun = ($BaselineCoreRootDirectory -ne [string]::Empty)
-
-$PayloadDirectory = (Join-Path $SourceDirectory "Payload")
-$PerformanceDirectory = (Join-Path $PayloadDirectory "performance")
-$WorkItemDirectory = (Join-Path $SourceDirectory "workitem")
-$ExtraBenchmarkDotNetArguments = "--iterationCount 1 --warmupCount 0 --invocationCount 1 --unrollFactor 1 --strategy ColdStart --stopOnFirstError true"
-$Creator = $env:BUILD_DEFINITIONNAME
-$PerfLabArguments = ""
-$HelixSourcePrefix = "pr"
-
-$Queue = ""
-
-if ($Internal) {
- switch ($LogicalMachine) {
- "perftiger" { $Queue = "Windows.11.Amd64.Tiger.Perf" }
- "perftiger_crossgen" { $Queue = "Windows.11.Amd64.Tiger.Perf" }
- "perfowl" { $Queue = "Windows.11.Amd64.Owl.Perf" }
- "perfsurf" { $Queue = "Windows.11.Arm64.Surf.Perf" }
- "perfpixel4a" { $Queue = "Windows.11.Amd64.Pixel.Perf" }
- "perfampere" { $Queue = "Windows.Server.Arm64.Perf" }
- "perfviper" { $Queue = "Windows.11.Amd64.Viper.Perf" }
- "cloudvm" { $Queue = "Windows.10.Amd64" }
- Default { $Queue = "Windows.11.Amd64.Tiger.Perf" }
- }
- $PerfLabArguments = "--upload-to-perflab-container"
- $ExtraBenchmarkDotNetArguments = ""
- $Creator = ""
- $HelixSourcePrefix = "official"
-}
-else {
- $Queue = "Windows.10.Amd64.ClientRS4.DevEx.15.8.Open"
-}
-
-if ($MonoInterpreter) {
- $ExtraBenchmarkDotNetArguments = "--category-exclusion-filter NoInterpreter"
-}
-
-if ($MonoDotnet -ne "") {
- $Configurations += " LLVM=$LLVM MonoInterpreter=$MonoInterpreter MonoAOT=$MonoAOT"
- if($ExtraBenchmarkDotNetArguments -eq "")
- {
- #FIX ME: We need to block these tests as they don't run on mono for now
- $ExtraBenchmarkDotNetArguments = "--exclusion-filter *Perf_Image* *Perf_NamedPipeStream*"
- }
- else
- {
- #FIX ME: We need to block these tests as they don't run on mono for now
- $ExtraBenchmarkDotNetArguments += " --exclusion-filter *Perf_Image* *Perf_NamedPipeStream*"
- }
-}
-
-if ($NoDynamicPGO) {
- $Configurations += " PGOType=nodynamicpgo"
-}
-
-if ($PhysicalPromotion) {
- $Configurations += " PhysicalPromotionType=physicalpromotion"
-}
-
-if ($NoR2R) {
- $Configurations += " R2RType=nor2r"
-}
-
-if ($ExperimentName) {
- $Configurations += " ExperimentName=$ExperimentName"
- if ($ExperimentName -eq "memoryRandomization") {
- $ExtraBenchmarkDotNetArguments += " --memoryRandomization true"
- }
-}
-
-if ($iOSMono) {
- $Configurations += " iOSLlvmBuild=$iOSLlvmBuild"
- $Configurations += " iOSStripSymbols=$iOSStripSymbols"
-}
-
-if ($iOSNativeAOT) {
- $Configurations += " iOSStripSymbols=$iOSStripSymbols"
-}
-
-if ($HybridGlobalization -eq "True") {
- $Configurations += " HybridGlobalization=True"
-}
-
-# FIX ME: This is a workaround until we get this from the actual pipeline
-$CleanedBranchName = "main"
-if($Branch.Contains("refs/heads/release"))
-{
- $CleanedBranchName = $Branch.replace('refs/heads/', '')
-}
-$CommonSetupArguments="--channel $CleanedBranchName --queue $Queue --build-number $BuildNumber --build-configs $Configurations --architecture $Architecture"
-$SetupArguments = "--repository https://github.com/$Repository --branch $Branch --get-perf-hash --commit-sha $CommitSha $CommonSetupArguments"
-
-if ($NoDynamicPGO) {
- $SetupArguments = "$SetupArguments --no-dynamic-pgo"
-}
-
-if ($PhysicalPromotion) {
- $SetupArguments = "$SetupArguments --physical-promotion"
-}
-
-if ($NoR2R) {
- $SetupArguments = "$SetupArguments --no-r2r"
-}
-
-if ($ExperimentName) {
- $SetupArguments = "$SetupArguments --experiment-name $ExperimentName"
-}
-
-if ($UseLocalCommitTime) {
- $LocalCommitTime = (git show -s --format=%ci $CommitSha)
- $SetupArguments = "$SetupArguments --commit-time `"$LocalCommitTime`""
-}
-
-if ($RunFromPerformanceRepo) {
- $SetupArguments = "--perf-hash $CommitSha $CommonSetupArguments"
-
- robocopy $SourceDirectory $PerformanceDirectory /E /XD $PayloadDirectory $SourceDirectory\artifacts $SourceDirectory\.git
-}
-else {
- git clone --branch main --depth 1 --quiet https://github.com/dotnet/performance $PerformanceDirectory
-}
-
-if ($MonoDotnet -ne "") {
- $UsingMono = "true"
- $MonoDotnetPath = (Join-Path $PayloadDirectory "dotnet-mono")
- Move-Item -Path $MonoDotnet -Destination $MonoDotnetPath
-}
-
-if ($UseCoreRun) {
- $NewCoreRoot = (Join-Path $PayloadDirectory "Core_Root")
- Move-Item -Path $CoreRootDirectory -Destination $NewCoreRoot
-}
-if ($UseBaselineCoreRun) {
- $NewBaselineCoreRoot = (Join-Path $PayloadDirectory "Baseline_Core_Root")
- Move-Item -Path $BaselineCoreRootDirectory -Destination $NewBaselineCoreRoot
-}
-
-if ($MauiVersion -ne "") {
- $SetupArguments = "$SetupArguments --maui-version $MauiVersion"
-}
-
-if ($AndroidMono) {
- if(!(Test-Path $WorkItemDirectory))
- {
- mkdir $WorkItemDirectory
- }
- Copy-Item -path "$SourceDirectory\MonoBenchmarksDroid.apk" $PayloadDirectory -Verbose
- Copy-Item -path "$SourceDirectory\androidHelloWorld\HelloAndroid.apk" $PayloadDirectory -Verbose
- $SetupArguments = $SetupArguments -replace $Architecture, 'arm64'
-}
-
-$DocsDir = (Join-Path $PerformanceDirectory "docs")
-robocopy $DocsDir $WorkItemDirectory
-
-# Set variables that we will need to have in future steps
-$ci = $true
-
-. "$PSScriptRoot\..\..\common\pipeline-logging-functions.ps1"
-
-# Directories
-Write-PipelineSetVariable -Name 'PayloadDirectory' -Value "$PayloadDirectory" -IsMultiJobVariable $false
-Write-PipelineSetVariable -Name 'PerformanceDirectory' -Value "$PerformanceDirectory" -IsMultiJobVariable $false
-Write-PipelineSetVariable -Name 'WorkItemDirectory' -Value "$WorkItemDirectory" -IsMultiJobVariable $false
-
-# Script Arguments
-Write-PipelineSetVariable -Name 'Python' -Value "py -3" -IsMultiJobVariable $false
-Write-PipelineSetVariable -Name 'ExtraBenchmarkDotNetArguments' -Value "$ExtraBenchmarkDotNetArguments" -IsMultiJobVariable $false
-Write-PipelineSetVariable -Name 'SetupArguments' -Value "$SetupArguments" -IsMultiJobVariable $false
-Write-PipelineSetVariable -Name 'PerfLabArguments' -Value "$PerfLabArguments" -IsMultiJobVariable $false
-Write-PipelineSetVariable -Name 'BDNCategories' -Value "$RunCategories" -IsMultiJobVariable $false
-Write-PipelineSetVariable -Name 'TargetCsproj' -Value "$Csproj" -IsMultiJobVariable $false
-Write-PipelineSetVariable -Name 'Kind' -Value "$Kind" -IsMultiJobVariable $false
-Write-PipelineSetVariable -Name 'Architecture' -Value "$Architecture" -IsMultiJobVariable $false
-Write-PipelineSetVariable -Name 'UseCoreRun' -Value "$UseCoreRun" -IsMultiJobVariable $false
-Write-PipelineSetVariable -Name 'UseBaselineCoreRun' -Value "$UseBaselineCoreRun" -IsMultiJobVariable $false
-Write-PipelineSetVariable -Name 'RunFromPerfRepo' -Value "$RunFromPerformanceRepo" -IsMultiJobVariable $false
-Write-PipelineSetVariable -Name 'Compare' -Value "$Compare" -IsMultiJobVariable $false
-Write-PipelineSetVariable -Name 'MonoDotnet' -Value "$UsingMono" -IsMultiJobVariable $false
-Write-PipelineSetVariable -Name 'MonoAOT' -Value "$MonoAOT" -IsMultiJobVariable $false
-Write-PipelineSetVariable -Name 'iOSLlvmBuild' -Value "$iOSLlvmBuild" -IsMultiJobVariable $false
-Write-PipelineSetVariable -Name 'iOSStripSymbols' -Value "$iOSStripSymbols" -IsMultiJobVariable $false
-Write-PipelineSetVariable -Name 'hybridGlobalization' -Value "$HybridGlobalization" -IsMultiJobVariable $false
-
-# Helix Arguments
-Write-PipelineSetVariable -Name 'Creator' -Value "$Creator" -IsMultiJobVariable $false
-Write-PipelineSetVariable -Name 'Queue' -Value "$Queue" -IsMultiJobVariable $false
-Write-PipelineSetVariable -Name 'HelixSourcePrefix' -Value "$HelixSourcePrefix" -IsMultiJobVariable $false
-Write-PipelineSetVariable -Name '_BuildConfig' -Value "$Architecture.$Kind.$Framework" -IsMultiJobVariable $false
-
-exit 0
diff --git a/eng/testing/performance/performance-setup.sh b/eng/testing/performance/performance-setup.sh
deleted file mode 100755
index 489b3a22ccc..00000000000
--- a/eng/testing/performance/performance-setup.sh
+++ /dev/null
@@ -1,564 +0,0 @@
-#!/usr/bin/env bash
-
-# Also reset/set below
-set -x
-
-source_directory=$BUILD_SOURCESDIRECTORY
-core_root_directory=
-baseline_core_root_directory=
-architecture=x64
-framework=
-compilation_mode=tiered
-repository=$BUILD_REPOSITORY_NAME
-branch=$BUILD_SOURCEBRANCH
-commit_sha=$BUILD_SOURCEVERSION
-build_number=$BUILD_BUILDNUMBER
-internal=false
-compare=false
-mono_dotnet=
-kind="micro"
-llvm=false
-monointerpreter=false
-monoaot=false
-monoaot_path=
-run_categories="Libraries Runtime"
-csproj="src\benchmarks\micro\MicroBenchmarks.csproj"
-configurations="CompilationMode=$compilation_mode RunKind=$kind"
-perf_fork=""
-perf_fork_branch="main"
-run_from_perf_repo=false
-use_core_run=true
-use_baseline_core_run=true
-using_mono=false
-wasm_bundle_directory=
-using_wasm=false
-use_latest_dotnet=false
-logical_machine=
-javascript_engine="v8"
-javascript_engine_path=""
-iosmono=false
-iosnativeaot=false
-runtimetype=""
-iosllvmbuild=""
-iosstripsymbols=""
-hybridglobalization=""
-maui_version=""
-use_local_commit_time=false
-only_sanity=false
-dotnet_versions=""
-v8_version=""
-
-while (($# > 0)); do
- lowerI="$(echo $1 | tr "[:upper:]" "[:lower:]")"
- case $lowerI in
- --sourcedirectory)
- source_directory=$2
- shift 2
- ;;
- --corerootdirectory)
- core_root_directory=$2
- shift 2
- ;;
- --baselinecorerootdirectory)
- baseline_core_root_directory=$2
- shift 2
- ;;
- --architecture)
- architecture=$2
- shift 2
- ;;
- --framework)
- framework=$2
- shift 2
- ;;
- --compilationmode)
- compilation_mode=$2
- shift 2
- ;;
- --logicalmachine)
- logical_machine=$2
- shift 2
- ;;
- --repository)
- repository=$2
- shift 2
- ;;
- --branch)
- branch=$2
- shift 2
- ;;
- --commitsha)
- commit_sha=$2
- shift 2
- ;;
- --buildnumber)
- build_number=$2
- shift 2
- ;;
- --javascriptengine)
- javascript_engine=$2
- shift 2
- ;;
- --javascriptenginepath)
- javascript_engine_path=$2
- shift 2
- ;;
- --kind)
- kind=$2
- configurations="CompilationMode=$compilation_mode RunKind=$kind"
- shift 2
- ;;
- --runcategories)
- run_categories=$2
- shift 2
- ;;
- --csproj)
- csproj=$2
- shift 2
- ;;
- --internal)
- internal=true
- shift 1
- ;;
- --alpine)
- alpine=true
- shift 1
- ;;
- --llvm)
- llvm=true
- shift 1
- ;;
- --monointerpreter)
- monointerpreter=true
- shift 1
- ;;
- --monoaot)
- monoaot=true
- monoaot_path=$2
- shift 2
- ;;
- --monodotnet)
- mono_dotnet=$2
- shift 2
- ;;
- --wasmbundle)
- wasm_bundle_directory=$2
- shift 2
- ;;
- --wasmaot)
- wasmaot=true
- shift 1
- ;;
- --nodynamicpgo)
- nodynamicpgo=true
- shift 1
- ;;
- --physicalpromotion)
- physicalpromotion=true
- shift 1
- ;;
- --nor2r)
- nor2r=true
- shift 1
- ;;
- --experimentname)
- experimentname=$2
- shift 2
- ;;
- --compare)
- compare=true
- shift 1
- ;;
- --configurations)
- configurations=$2
- shift 2
- ;;
- --latestdotnet)
- use_latest_dotnet=true
- shift 1
- ;;
- --dotnetversions)
- dotnet_versions="$2"
- shift 2
- ;;
- --iosmono)
- iosmono=true
- shift 1
- ;;
- --iosnativeaot)
- iosnativeaot=true
- shift 1
- ;;
- --iosllvmbuild)
- iosllvmbuild=$2
- shift 2
- ;;
- --iosstripsymbols)
- iosstripsymbols=$2
- shift 2
- ;;
- --hybridglobalization)
- hybridglobalization=$2
- shift 2
- ;;
- --mauiversion)
- maui_version=$2
- shift 2
- ;;
- --uselocalcommittime)
- use_local_commit_time=true
- shift 1
- ;;
- --perffork)
- perf_fork=$2
- shift 2
- ;;
- --perfforkbranch)
- perf_fork_branch=$2
- shift 2
- ;;
- --only-sanity)
- only_sanity=true
- shift 1
- ;;
- *)
- echo "Common settings:"
- echo " --corerootdirectory Directory where Core_Root exists, if running perf testing with --corerun"
- echo " --architecture Architecture of the testing being run"
- echo " --configurations List of key=value pairs that will be passed to perf testing infrastructure."
- echo " ex: --configurations \"CompilationMode=Tiered OptimzationLevel=PGO\""
- echo " --help Print help and exit"
- echo ""
- echo "Advanced settings:"
- echo " --framework The framework to run, if not running in master"
- echo " --compilationmode The compilation mode if not passing --configurations"
- echo " --sourcedirectory The directory of the sources. Defaults to env:BUILD_SOURCESDIRECTORY"
- echo " --repository The name of the repository in the / format. Defaults to env:BUILD_REPOSITORY_NAME"
- echo " --branch The name of the branch. Defaults to env:BUILD_SOURCEBRANCH"
- echo " --commitsha The commit sha1 to run against. Defaults to env:BUILD_SOURCEVERSION"
- echo " --buildnumber The build number currently running. Defaults to env:BUILD_BUILDNUMBER"
- echo " --csproj The relative path to the benchmark csproj whose tests should be run. Defaults to src\benchmarks\micro\MicroBenchmarks.csproj"
- echo " --kind Related to csproj. The kind of benchmarks that should be run. Defaults to micro"
- echo " --runcategories Related to csproj. Categories of benchmarks to run. Defaults to \"coreclr corefx\""
- echo " --internal If the benchmarks are running as an official job."
- echo " --monodotnet Pass the path to the mono dotnet for mono performance testing."
- echo " --wasmbundle Path to the wasm bundle containing the dotnet, and data needed for helix payload"
- echo " --wasmaot Indicate wasm aot"
- echo " --latestdotnet --dotnet-versions will not be specified. --dotnet-versions defaults to LKG version in global.json "
- echo " --dotnetversions Passed as '--dotnet-versions ' to the setup script"
- echo " --alpine Set for runs on Alpine"
- echo " --llvm Set LLVM for Mono runs"
- echo " --iosmono Set for ios Mono/Maui runs"
- echo " --iosnativeaot Set for ios Native AOT runs"
- echo " --iosllvmbuild Set LLVM for iOS Mono/Maui runs"
- echo " --iosstripsymbols Set STRIP_DEBUG_SYMBOLS for iOS Mono/Maui runs"
- echo " --hybridglobalization Set hybrid globalization for iOS Mono/Maui/Wasm runs"
- echo " --mauiversion Set the maui version for Mono/Maui runs"
- echo " --uselocalcommittime Pass local runtime commit time to the setup script"
- echo " --nodynamicpgo Set for No dynamic PGO runs"
- echo " --physicalpromotion Set for runs with physical promotion"
- echo " --nor2r Set for No R2R runs"
- echo " --experimentname Set Experiment Name"
- echo ""
- exit 1
- ;;
- esac
-done
-
-if [[ "$repository" == "dotnet/performance" || "$repository" == "dotnet-performance" ]]; then
- run_from_perf_repo=true
-fi
-
-if [ -z "$configurations" ]; then
- configurations="CompilationMode=$compilation_mode"
-fi
-
-if [ -z "$core_root_directory" ]; then
- use_core_run=false
-fi
-
-if [ -z "$baseline_core_root_directory" ]; then
- use_baseline_core_run=false
-fi
-
-payload_directory=$source_directory/Payload
-performance_directory=$payload_directory/performance
-benchmark_directory=$payload_directory/BenchmarkDotNet
-workitem_directory=$source_directory/workitem
-extra_benchmark_dotnet_arguments="--iterationCount 1 --warmupCount 0 --invocationCount 1 --unrollFactor 1 --strategy ColdStart --stopOnFirstError true"
-perflab_arguments=
-queue=Ubuntu.2204.Amd64.Open
-creator=$BUILD_DEFINITIONNAME
-helix_source_prefix="pr"
-
-if [[ "$internal" == true ]]; then
- perflab_arguments="--upload-to-perflab-container"
- helix_source_prefix="official"
- creator=
- extra_benchmark_dotnet_arguments=
-
- if [[ "$logical_machine" == "perfiphone12mini" ]]; then
- queue=OSX.13.Amd64.Iphone.Perf
- elif [[ "$logical_machine" == "perfampere" ]]; then
- queue=Ubuntu.2204.Arm64.Perf
- elif [[ "$logical_machine" == "perfviper" ]]; then
- queue=Ubuntu.2204.Amd64.Viper.Perf
- elif [[ "$logical_machine" == "cloudvm" ]]; then
- queue=Ubuntu.2204.Amd64
- elif [[ "$architecture" == "arm64" ]]; then
- queue=Ubuntu.1804.Arm64.Perf
- else
- if [[ "$logical_machine" == "perfowl" ]]; then
- queue=Ubuntu.2204.Amd64.Owl.Perf
- elif [[ "$logical_machine" == "perftiger_crossgen" ]]; then
- queue=Ubuntu.1804.Amd64.Tiger.Perf
- else
- queue=Ubuntu.2204.Amd64.Tiger.Perf
- fi
- fi
-
- if [[ "$alpine" == "true" ]]; then
- queue=alpine.amd64.tiger.perf
- fi
-else
- if [[ "$architecture" == "arm64" ]]; then
- queue=ubuntu.1804.armarch.open
- else
- queue=Ubuntu.2204.Amd64.Open
- fi
-
- if [[ "$alpine" == "true" ]]; then
- queue=alpine.amd64.tiger.perf
- fi
-fi
-
-if [[ -n "$mono_dotnet" && "$monointerpreter" == "false" ]]; then
- configurations="$configurations LLVM=$llvm MonoInterpreter=$monointerpreter MonoAOT=$monoaot"
- extra_benchmark_dotnet_arguments="$extra_benchmark_dotnet_arguments --category-exclusion-filter NoMono"
-fi
-
-_BuildConfig="$architecture.$kind.$framework"
-
-if [[ -n "$wasm_bundle_directory" ]]; then
- if [[ "$wasmaot" == "true" ]]; then
- configurations="CompilationMode=wasm AOT=true RunKind=$kind"
- _BuildConfig="wasmaot.$_BuildConfig"
- else
- configurations="CompilationMode=wasm RunKind=$kind"
- _BuildConfig="wasm.$_BuildConfig"
- fi
- if [[ "$javascript_engine" == "javascriptcore" ]]; then
- configurations="$configurations JSEngine=javascriptcore"
- fi
- extra_benchmark_dotnet_arguments="$extra_benchmark_dotnet_arguments --category-exclusion-filter NoInterpreter NoWASM NoMono"
-fi
-
-if [[ -n "$mono_dotnet" && "$monointerpreter" == "true" ]]; then
- configurations="$configurations LLVM=$llvm MonoInterpreter=$monointerpreter MonoAOT=$monoaot"
- extra_benchmark_dotnet_arguments="$extra_benchmark_dotnet_arguments --category-exclusion-filter NoInterpreter NoMono"
-fi
-
-if [[ "$monoaot" == "true" ]]; then
- configurations="$configurations LLVM=$llvm MonoInterpreter=$monointerpreter MonoAOT=$monoaot"
- extra_benchmark_dotnet_arguments="$extra_benchmark_dotnet_arguments --category-exclusion-filter NoAOT NoWASM"
-fi
-
-if [[ "$iosmono" == "true" ]]; then
- runtimetype="Mono"
- configurations="$configurations iOSLlvmBuild=$iosllvmbuild iOSStripSymbols=$iosstripsymbols RuntimeType=$runtimetype"
- extra_benchmark_dotnet_arguments="$extra_benchmark_dotnet_arguments"
-fi
-
-if [[ "$iosnativeaot" == "true" ]]; then
- runtimetype="NativeAOT"
- configurations="$configurations iOSStripSymbols=$iosstripsymbols RuntimeType=$runtimetype"
- extra_benchmark_dotnet_arguments="$extra_benchmark_dotnet_arguments"
-fi
-
-if [[ "$nodynamicpgo" == "true" ]]; then
- configurations="$configurations PGOType=nodynamicpgo"
-fi
-
-if [[ "$physicalpromotion" == "true" ]]; then
- configurations="$configurations PhysicalPromotionType=physicalpromotion"
-fi
-
-if [[ "$nor2r" == "true" ]]; then
- configurations="$configurations R2RType=nor2r"
-fi
-
-if [[ ! -z "$experimentname" ]]; then
- configurations="$configurations ExperimentName=$experimentname"
- if [[ "$experimentname" == "memoryRandomization" ]]; then
- extra_benchmark_dotnet_arguments="$extra_benchmark_dotnet_arguments --memoryRandomization true"
- fi
-fi
-
-if [[ "$(echo "$hybridglobalization" | tr '[:upper:]' '[:lower:]')" == "true" ]]; then # convert to lowercase to test
- configurations="$configurations HybridGlobalization=True" # Force True for consistency
-fi
-
-
-
-cleaned_branch_name="main"
-if [[ $branch == *"refs/heads/release"* ]]; then
- cleaned_branch_name=${branch/refs\/heads\//}
-fi
-common_setup_arguments="--channel $cleaned_branch_name --queue $queue --build-number $build_number --build-configs $configurations --architecture $architecture"
-setup_arguments="--repository https://github.com/$repository --branch $branch --get-perf-hash --commit-sha $commit_sha $common_setup_arguments"
-
-if [[ "$internal" != true ]]; then
- setup_arguments="$setup_arguments --not-in-lab"
-fi
-
-if [[ "$use_local_commit_time" == true ]]; then
- local_commit_time=$(git show -s --format=%ci $commit_sha)
- setup_arguments="$setup_arguments --commit-time \"$local_commit_time\""
-fi
-
-if [[ "$run_from_perf_repo" == true ]]; then
- payload_directory=
- workitem_directory=$source_directory
- performance_directory=$workitem_directory
- setup_arguments="--perf-hash $commit_sha $common_setup_arguments"
-else
- if [[ -n "$perf_fork" ]]; then
- git clone --branch $perf_fork_branch --depth 1 --quiet $perf_fork $performance_directory
- else
- git clone --branch main --depth 1 --quiet https://github.com/dotnet/performance.git $performance_directory
- fi
- # uncomment to use BenchmarkDotNet sources instead of nuget packages
- # git clone https://github.com/dotnet/BenchmarkDotNet.git $benchmark_directory
-
- (cd $performance_directory; git show -s HEAD)
-
- docs_directory=$performance_directory/docs
- mv $docs_directory $workitem_directory
-fi
-
-if [[ -n "$maui_version" ]]; then
- setup_arguments="$setup_arguments --maui-version $maui_version"
-fi
-
-if [[ -n "$wasm_bundle_directory" ]]; then
- using_wasm=true
- wasm_bundle_directory_path=$payload_directory
- mv $wasm_bundle_directory/* $wasm_bundle_directory_path
- wasm_args="--expose_wasm"
- if [ "$javascript_engine" == "v8" ]; then
- # for es6 module support
- wasm_args="$wasm_args --module"
-
- # get required version
- if [[ -z "$v8_version" ]]; then
- v8_version=`grep linux_V8Version $source_directory/eng/testing/BrowserVersions.props | sed -e 's,.*>\([^\<]*\)<.*,\1,g' | cut -d. -f 1-3`
- echo "V8 version: $v8_version"
- fi
- if [[ -z "$javascript_engine_path" ]]; then
- javascript_engine_path="/home/helixbot/.jsvu/bin/v8-${v8_version}"
- fi
- fi
-
- if [[ -z "$javascript_engine_path" ]]; then
- javascript_engine_path="/home/helixbot/.jsvu/bin/$javascript_engine"
- fi
-
- # Workaround: escaping the quotes around `--wasmArgs=..` so they get retained for the actual command line
- extra_benchmark_dotnet_arguments="$extra_benchmark_dotnet_arguments --wasmEngine $javascript_engine_path \\\"--wasmArgs=$wasm_args\\\" --cli \$HELIX_CORRELATION_PAYLOAD/dotnet/dotnet --wasmDataDir \$HELIX_CORRELATION_PAYLOAD/wasm-data"
- if [[ "$wasmaot" == "true" ]]; then
- extra_benchmark_dotnet_arguments="$extra_benchmark_dotnet_arguments --aotcompilermode wasm --buildTimeout 3600"
- fi
- setup_arguments="$setup_arguments --dotnet-path $wasm_bundle_directory_path/dotnet"
-fi
-
-if [[ -n "$mono_dotnet" && "$monoaot" == "false" ]]; then
- using_mono=true
- mono_dotnet_path=$payload_directory/dotnet-mono
- mv $mono_dotnet $mono_dotnet_path
-fi
-
-if [[ -n "$dotnet_versions" ]]; then
- setup_arguments="$setup_arguments --dotnet-versions $dotnet_versions"
-fi
-
-if [[ "$nodynamicpgo" == "true" ]]; then
- setup_arguments="$setup_arguments --no-dynamic-pgo"
-fi
-
-if [[ "$physicalpromotion" == "true" ]]; then
- setup_arguments="$setup_arguments --physical-promotion"
-fi
-
-if [[ "$nor2r" == "true" ]]; then
- setup_arguments="$setup_arguments --no-r2r"
-fi
-
-if [[ ! -z "$experimentname" ]]; then
- setup_arguments="$setup_arguments --experiment-name $experimentname"
-fi
-
-if [[ "$monoaot" == "true" ]]; then
- monoaot_dotnet_path=$payload_directory/monoaot
- mv $monoaot_path $monoaot_dotnet_path
- extra_benchmark_dotnet_arguments="$extra_benchmark_dotnet_arguments --runtimes monoaotllvm --aotcompilerpath \$HELIX_CORRELATION_PAYLOAD/monoaot/mono-aot-cross --customruntimepack \$HELIX_CORRELATION_PAYLOAD/monoaot/pack --aotcompilermode llvm"
-fi
-
-extra_benchmark_dotnet_arguments="$extra_benchmark_dotnet_arguments --logBuildOutput --generateBinLog"
-
-if [[ "$use_core_run" == true ]]; then
- new_core_root=$payload_directory/Core_Root
- mv $core_root_directory $new_core_root
-fi
-
-if [[ "$use_baseline_core_run" == true ]]; then
- new_baseline_core_root=$payload_directory/Baseline_Core_Root
- mv $baseline_core_root_directory $new_baseline_core_root
-fi
-
-if [[ "$iosmono" == "true" || "$iosnativeaot" == "true" ]]; then
- mkdir -p $payload_directory/iosHelloWorld && cp -rv $source_directory/iosHelloWorld $payload_directory/iosHelloWorld
- mkdir -p $payload_directory/iosHelloWorldZip && cp -rv $source_directory/iosHelloWorldZip $payload_directory/iosHelloWorldZip
-
- find "$payload_directory/iosHelloWorldZip/" -type f -name "*.zip" -execdir mv {} "$payload_directory/iosHelloWorldZip/iOSSampleApp.zip" \;
-fi
-
-ci=true
-
-_script_dir=$(pwd)/eng/common
-. "$_script_dir/pipeline-logging-functions.sh"
-
-# Prevent vso[task.setvariable to be erroneously processed
-set +x
-
-# Make sure all of our variables are available for future steps
-Write-PipelineSetVariable -name "UseCoreRun" -value "$use_core_run" -is_multi_job_variable false
-Write-PipelineSetVariable -name "UseBaselineCoreRun" -value "$use_baseline_core_run" -is_multi_job_variable false
-Write-PipelineSetVariable -name "Architecture" -value "$architecture" -is_multi_job_variable false
-Write-PipelineSetVariable -name "PayloadDirectory" -value "$payload_directory" -is_multi_job_variable false
-Write-PipelineSetVariable -name "PerformanceDirectory" -value "$performance_directory" -is_multi_job_variable false
-Write-PipelineSetVariable -name "WorkItemDirectory" -value "$workitem_directory" -is_multi_job_variable false
-Write-PipelineSetVariable -name "Queue" -value "$queue" -is_multi_job_variable false
-Write-PipelineSetVariable -name "SetupArguments" -value "$setup_arguments" -is_multi_job_variable false
-Write-PipelineSetVariable -name "Python" -value "python3" -is_multi_job_variable false
-Write-PipelineSetVariable -name "PerfLabArguments" -value "$perflab_arguments" -is_multi_job_variable false
-Write-PipelineSetVariable -name "ExtraBenchmarkDotNetArguments" -value "$extra_benchmark_dotnet_arguments" -is_multi_job_variable false
-Write-PipelineSetVariable -name "BDNCategories" -value "$run_categories" -is_multi_job_variable false
-Write-PipelineSetVariable -name "TargetCsproj" -value "$csproj" -is_multi_job_variable false
-Write-PipelineSetVariable -name "RunFromPerfRepo" -value "$run_from_perf_repo" -is_multi_job_variable false
-Write-PipelineSetVariable -name "Creator" -value "$creator" -is_multi_job_variable false
-Write-PipelineSetVariable -name "HelixSourcePrefix" -value "$helix_source_prefix" -is_multi_job_variable false
-Write-PipelineSetVariable -name "Kind" -value "$kind" -is_multi_job_variable false
-Write-PipelineSetVariable -name "_BuildConfig" -value "$_BuildConfig" -is_multi_job_variable false
-Write-PipelineSetVariable -name "Compare" -value "$compare" -is_multi_job_variable false
-Write-PipelineSetVariable -name "MonoDotnet" -value "$using_mono" -is_multi_job_variable false
-Write-PipelineSetVariable -name "MonoAOT" -value "$monoaot" -is_multi_job_variable false
-Write-PipelineSetVariable -name "WasmDotnet" -value "$using_wasm" -is_multi_job_variable false
-Write-PipelineSetVariable -Name 'iOSLlvmBuild' -Value "$iosllvmbuild" -is_multi_job_variable false
-Write-PipelineSetVariable -Name 'iOSStripSymbols' -Value "$iosstripsymbols" -is_multi_job_variable false
-Write-PipelineSetVariable -Name 'hybridGlobalization' -Value "$hybridglobalization" -is_multi_job_variable false
-Write-PipelineSetVariable -Name 'RuntimeType' -Value "$runtimetype" -is_multi_job_variable false
-Write-PipelineSetVariable -name "OnlySanityCheck" -value "$only_sanity" -is_multi_job_variable false
-Write-PipelineSetVariable -name "V8Version" -value "$v8_version" -is_multi_job_variable false
-
-# Put it back to what was set on top of this script
-set -x
diff --git a/eng/testing/tests.ioslike.targets b/eng/testing/tests.ioslike.targets
index dde49575d7a..a59cce6c49a 100644
--- a/eng/testing/tests.ioslike.targets
+++ b/eng/testing/tests.ioslike.targets
@@ -71,6 +71,7 @@
+
@@ -93,8 +94,7 @@
- <_BundlePdbFiles Include="$([System.IO.Path]::ChangeExtension('%(AppleAssembliesToBundle.Identity)', '.pdb'))" />
-
+
@@ -213,7 +213,10 @@
<_IsNative>false
-
+
+
+
+
diff --git a/eng/testing/tests.props b/eng/testing/tests.props
index 2c555d1abaf..9cc17e209ae 100644
--- a/eng/testing/tests.props
+++ b/eng/testing/tests.props
@@ -15,7 +15,11 @@
$([MSBuild]::NormalizeDirectory('$(ArtifactsBinDir)', 'AppleTestRunner', '$(Configuration)', '$(NetCoreAppCurrent)'))$([MSBuild]::NormalizeDirectory('$(ArtifactsBinDir)', 'AndroidTestRunner', '$(Configuration)', '$(NetCoreAppCurrent)'))
- $([MSBuild]::NormalizeDirectory('$(ArtifactsBinDir)', 'WasmTestRunner', '$(Configuration)', '$(NetCoreAppCurrent)'))
+
+ <_WasmTestRunnerTFM>$(NetCoreAppCurrent)
+ <_WasmTestRunnerTFM Condition="'$(TargetsWasi)' == 'true'">$(NetCoreAppCurrent)-wasi
+ <_WasmTestRunnerTFM Condition="'$(TargetsBrowser)' == 'true'">$(NetCoreAppCurrent)-browser
+ $([MSBuild]::NormalizeDirectory('$(ArtifactsBinDir)', 'WasmTestRunner', '$(Configuration)', '$(_WasmTestRunnerTFM)'))$(OutputRID)true
diff --git a/eng/testing/tests.targets b/eng/testing/tests.targets
index 4883ed549ea..5eb33b12baa 100644
--- a/eng/testing/tests.targets
+++ b/eng/testing/tests.targets
@@ -195,7 +195,7 @@ TEST_ARCH=$(_AndroidArchitecture)
-
+
diff --git a/eng/testing/tests.wasi.targets b/eng/testing/tests.wasi.targets
index 148026015be..e076ac18c4b 100644
--- a/eng/testing/tests.wasi.targets
+++ b/eng/testing/tests.wasi.targets
@@ -29,7 +29,7 @@
<_AppArgs Condition="'$(IsFunctionalTest)' != 'true' and '$(WasmSingleFileBundle)' != 'true'">$(_AppArgs) managed/$(AssemblyName).dll
<_AppArgs Condition="'$(IsFunctionalTest)' != 'true' and '$(WasmSingleFileBundle)' == 'true'">$(_AppArgs) $(AssemblyName).dll
- <_AppArgs Condition="'$(WasmTestAppArgs)' != ''">$(_AppArgs) -- $(WasmTestAppArgs)
+ <_AppArgs Condition="'$(WasmTestAppArgs)' != ''">$(_AppArgs) $(WasmTestAppArgs)
$(WasmXHarnessMonoArgs) --env=XHARNESS_LOG_TEST_START=true
@@ -49,6 +49,8 @@
<_XHarnessArgs Condition="'$(WasmXHarnessTestsTimeout)' != ''" >$(_XHarnessArgs) "--timeout=$(WasmXHarnessTestsTimeout)"
<_XHarnessArgs >$(_XHarnessArgs) --engine-arg=--wasm --engine-arg=max-wasm-stack=134217728
<_XHarnessArgs >$(_XHarnessArgs) --engine-arg=--wasi --engine-arg=http
+ <_XHarnessArgs >$(_XHarnessArgs) --engine-arg=--wasi --engine-arg=inherit-network
+ <_XHarnessArgs >$(_XHarnessArgs) --engine-arg=--wasi --engine-arg=allow-ip-name-lookup
<_XHarnessArgs >$(_XHarnessArgs) --engine-arg=--env --engine-arg=DOTNET_WASI_PRINT_EXIT_CODE=1
<_XHarnessArgs Condition="'$(WasmXHarnessArgsCli)' != ''" >$(_XHarnessArgs) $(WasmXHarnessArgsCli)
diff --git a/eng/testing/xunit/xunit.console.targets b/eng/testing/xunit/xunit.console.targets
index dda953aafbe..abbf879cd5e 100644
--- a/eng/testing/xunit/xunit.console.targets
+++ b/eng/testing/xunit/xunit.console.targets
@@ -4,6 +4,11 @@
truetestResults.xmltrue
+
+ $(XunitConsoleNetCore21AppPath)
+ $(XunitConsole472Path)
+
+ true
@@ -12,10 +17,15 @@
+
+ xunit.console.dll
+ xunit.console.exe
+
+
<_depsFileArgument Condition="'$(GenerateDependencyFile)' == 'true'">--depsfile $(AssemblyName).deps.json
- "$(RunScriptHost)" exec --runtimeconfig $(AssemblyName).runtimeconfig.json $(_depsFileArgument) xunit.console.dll
- xunit.console.exe
+ "$(RunScriptHost)" exec --runtimeconfig $(AssemblyName).runtimeconfig.json $(_depsFileArgument) $(XunitConsolePath)
+ $(XunitConsolePath)$(RunScriptCommand) $(TargetFileName)$(RunScriptCommand) -xml $(TestResultsName)
@@ -74,9 +84,8 @@
-
+
-
trueIL verification library.
+
+ 9.0.0 - In the ILVerify.IResolver interface, the type of the first parameter of each method is now System.Reflection.Metadata.AssemblyNameInfo rather than System.Reflection.AssemblyName.
+
diff --git a/src/coreclr/System.Private.CoreLib/System.Private.CoreLib.csproj b/src/coreclr/System.Private.CoreLib/System.Private.CoreLib.csproj
index 115e29762b4..e027d8d7d75 100644
--- a/src/coreclr/System.Private.CoreLib/System.Private.CoreLib.csproj
+++ b/src/coreclr/System.Private.CoreLib/System.Private.CoreLib.csproj
@@ -25,7 +25,6 @@
-
@@ -111,6 +110,7 @@
+
@@ -140,6 +140,7 @@
+
@@ -195,7 +196,6 @@
-
@@ -207,8 +207,6 @@
-
-
@@ -231,7 +229,6 @@
-
@@ -269,8 +266,6 @@
-
-
@@ -291,6 +286,9 @@
Common\Interop\Windows\OleAut32\Interop.VariantClear.cs
+
+ Common\Interop\Windows\OleAut32\Interop.VariantChangeTypeEx.cs
+
@@ -302,11 +300,6 @@
-
-
-
-
-
@@ -317,15 +310,15 @@
-
- src\System\Diagnostics\Eventing\Generated\NativeRuntimeEventSource.CoreCLR.cs
+
+ src\System\Diagnostics\Eventing\NativeRuntimeEventSource.Generated.cs
-
+
<_PythonWarningParameter>-Wall
<_PythonWarningParameter Condition="'$(MSBuildTreatWarningsAsErrors)' == 'true'">$(_PythonWarningParameter) -Werror
diff --git a/src/coreclr/System.Private.CoreLib/src/ILLink/ILLink.Descriptors.Shared.xml b/src/coreclr/System.Private.CoreLib/src/ILLink/ILLink.Descriptors.Shared.xml
index 5bf4d422a76..0c7a6d7e625 100644
--- a/src/coreclr/System.Private.CoreLib/src/ILLink/ILLink.Descriptors.Shared.xml
+++ b/src/coreclr/System.Private.CoreLib/src/ILLink/ILLink.Descriptors.Shared.xml
@@ -7,8 +7,6 @@
-
-
diff --git a/src/coreclr/System.Private.CoreLib/src/ILLink/ILLink.Substitutions.Windows.xml b/src/coreclr/System.Private.CoreLib/src/ILLink/ILLink.Substitutions.Windows.xml
deleted file mode 100644
index 28a6da5f793..00000000000
--- a/src/coreclr/System.Private.CoreLib/src/ILLink/ILLink.Substitutions.Windows.xml
+++ /dev/null
@@ -1,10 +0,0 @@
-
-
-
-
-
-
-
-
-
-
diff --git a/src/coreclr/System.Private.CoreLib/src/System/ThrowHelper.cs b/src/coreclr/System.Private.CoreLib/src/Internal/Runtime/CompilerHelpers/ThrowHelpers.cs
similarity index 90%
rename from src/coreclr/System.Private.CoreLib/src/System/ThrowHelper.cs
rename to src/coreclr/System.Private.CoreLib/src/Internal/Runtime/CompilerHelpers/ThrowHelpers.cs
index 254cc0df8a2..fd01c28423e 100644
--- a/src/coreclr/System.Private.CoreLib/src/System/ThrowHelper.cs
+++ b/src/coreclr/System.Private.CoreLib/src/Internal/Runtime/CompilerHelpers/ThrowHelpers.cs
@@ -1,19 +1,21 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
+using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
-namespace System
+namespace Internal.Runtime.CompilerHelpers
{
- internal static unsafe partial class ThrowHelper
+ internal static unsafe partial class ThrowHelpers
{
[DoesNotReturn]
[LibraryImport(RuntimeHelpers.QCall, EntryPoint = "ExceptionNative_ThrowAmbiguousResolutionException")]
private static partial void ThrowAmbiguousResolutionException(MethodTable* targetType, MethodTable* interfaceType, void* methodDesc);
[DoesNotReturn]
+ [DebuggerHidden]
internal static void ThrowAmbiguousResolutionException(
void* method, // MethodDesc*
void* interfaceType, // MethodTable*
@@ -27,6 +29,7 @@ internal static void ThrowAmbiguousResolutionException(
private static partial void ThrowEntryPointNotFoundException(MethodTable* targetType, MethodTable* interfaceType, void* methodDesc);
[DoesNotReturn]
+ [DebuggerHidden]
internal static void ThrowEntryPointNotFoundException(
void* method, // MethodDesc*
void* interfaceType, // MethodTable*
diff --git a/src/coreclr/System.Private.CoreLib/src/Internal/Runtime/InteropServices/InMemoryAssemblyLoader.cs b/src/coreclr/System.Private.CoreLib/src/Internal/Runtime/InteropServices/InMemoryAssemblyLoader.cs
index 4a7d0a88c19..6929c4752ed 100644
--- a/src/coreclr/System.Private.CoreLib/src/Internal/Runtime/InteropServices/InMemoryAssemblyLoader.cs
+++ b/src/coreclr/System.Private.CoreLib/src/Internal/Runtime/InteropServices/InMemoryAssemblyLoader.cs
@@ -15,6 +15,7 @@ namespace Internal.Runtime.InteropServices
[SupportedOSPlatform("windows")]
internal static class InMemoryAssemblyLoader
{
+ [FeatureSwitchDefinition("System.Runtime.InteropServices.EnableCppCLIHostActivation")]
private static bool IsSupported { get; } = InitializeIsSupported();
private static bool InitializeIsSupported() => AppContext.TryGetSwitch("System.Runtime.InteropServices.EnableCppCLIHostActivation", out bool isSupported) ? isSupported : true;
diff --git a/src/coreclr/System.Private.CoreLib/src/Microsoft/Win32/OAVariantLib.cs b/src/coreclr/System.Private.CoreLib/src/Microsoft/Win32/OAVariantLib.cs
index a51ec6f0132..bf6952f333e 100644
--- a/src/coreclr/System.Private.CoreLib/src/Microsoft/Win32/OAVariantLib.cs
+++ b/src/coreclr/System.Private.CoreLib/src/Microsoft/Win32/OAVariantLib.cs
@@ -20,6 +20,7 @@
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.InteropServices.Marshalling;
+using System.StubHelpers;
namespace Microsoft.Win32
{
@@ -73,8 +74,7 @@ internal static unsafe partial class OAVariantLib
if (source is int || source is uint)
{
uint sourceData = source is int ? (uint)(int)source : (uint)source;
- // Int32/UInt32 can be converted to System.Drawing.Color
- Variant.ConvertOleColorToSystemColor(ObjectHandleOnStack.Create(ref result), sourceData, targetClass.TypeHandle.Value);
+ result = ColorMarshaler.ConvertToManaged((int)sourceData);
Debug.Assert(result != null);
return result;
}
@@ -151,19 +151,10 @@ private static ComVariant ToOAVariant(object input)
null => default,
Missing => throw new NotSupportedException(SR.NotSupported_ChangeType),
DBNull => ComVariant.Null,
- _ => GetComIPFromObjectRef(input) // Convert the object to an IDispatch/IUnknown pointer.
+ _ => Variant.GetIUnknownOrIDispatchFromObject(input) // Convert the object to an IDispatch/IUnknown pointer.
};
}
- private static ComVariant GetComIPFromObjectRef(object? obj)
- {
- IntPtr pUnk = GetIUnknownOrIDispatchForObject(ObjectHandleOnStack.Create(ref obj), out bool isIDispatch);
- return ComVariant.CreateRaw(isIDispatch ? VarEnum.VT_DISPATCH : VarEnum.VT_UNKNOWN, pUnk);
- }
-
- [LibraryImport(RuntimeHelpers.QCall, EntryPoint = "MarshalNative_GetIUnknownOrIDispatchForObject")]
- private static partial IntPtr GetIUnknownOrIDispatchForObject(ObjectHandleOnStack o, [MarshalAs(UnmanagedType.Bool)] out bool isIDispatch);
-
private static object? FromOAVariant(ComVariant input) =>
input.VarType switch
{
diff --git a/src/coreclr/System.Private.CoreLib/src/System/Diagnostics/Debugger.cs b/src/coreclr/System.Private.CoreLib/src/System/Diagnostics/Debugger.cs
index 9a51c47cb02..576c1fe696b 100644
--- a/src/coreclr/System.Private.CoreLib/src/System/Diagnostics/Debugger.cs
+++ b/src/coreclr/System.Private.CoreLib/src/System/Diagnostics/Debugger.cs
@@ -11,15 +11,15 @@ namespace System.Diagnostics
{
public static partial class Debugger
{
+ [LibraryImport(RuntimeHelpers.QCall, EntryPoint = "DebugDebugger_Break")]
+ private static partial void BreakInternal();
+
// Break causes a breakpoint to be signalled to an attached debugger. If no debugger
// is attached, the user is asked if they want to attach a debugger. If yes, then the
// debugger is launched.
[MethodImpl(MethodImplOptions.NoInlining)]
public static void Break() => BreakInternal();
- [MethodImpl(MethodImplOptions.InternalCall)]
- private static extern void BreakInternal();
-
// Launch launches & attaches a debugger to the process. If a debugger is already attached,
// nothing happens.
//
@@ -30,11 +30,6 @@ public static partial class Debugger
// See code:NotifyOfCrossThreadDependency for more details.
private sealed class CrossThreadDependencyNotification : ICustomDebuggerNotification { }
- // Do not inline the slow path
- [MethodImpl(MethodImplOptions.NoInlining)]
- private static void NotifyOfCrossThreadDependencySlow() =>
- CustomNotification(new CrossThreadDependencyNotification());
-
// Sends a notification to the debugger to indicate that execution is about to enter a path
// involving a cross thread dependency. A debugger that has opted into this type of notification
// can take appropriate action on receipt. For example, performing a funceval normally requires
@@ -49,6 +44,14 @@ public static void NotifyOfCrossThreadDependency()
{
NotifyOfCrossThreadDependencySlow();
}
+
+ // Do not inline the slow path
+ [MethodImpl(MethodImplOptions.NoInlining)]
+ static void NotifyOfCrossThreadDependencySlow()
+ {
+ var notify = new CrossThreadDependencyNotification();
+ CustomNotification(ObjectHandleOnStack.Create(ref notify));
+ }
}
[LibraryImport(RuntimeHelpers.QCall, EntryPoint = "DebugDebugger_Launch")]
@@ -89,7 +92,7 @@ public static extern bool IsAttached
// Posts a custom notification for the attached debugger. If there is no
// debugger attached, has no effect. The debugger may or may not
// report the notification depending on its settings.
- [MethodImpl(MethodImplOptions.InternalCall)]
- private static extern void CustomNotification(ICustomDebuggerNotification data);
+ [LibraryImport(RuntimeHelpers.QCall, EntryPoint = "DebugDebugger_CustomNotification")]
+ private static partial void CustomNotification(ObjectHandleOnStack data);
}
}
diff --git a/src/coreclr/System.Private.CoreLib/src/System/Diagnostics/StackFrame.CoreCLR.cs b/src/coreclr/System.Private.CoreLib/src/System/Diagnostics/StackFrame.CoreCLR.cs
index b68e4f2884b..8c1850d31e8 100644
--- a/src/coreclr/System.Private.CoreLib/src/System/Diagnostics/StackFrame.CoreCLR.cs
+++ b/src/coreclr/System.Private.CoreLib/src/System/Diagnostics/StackFrame.CoreCLR.cs
@@ -30,9 +30,9 @@ internal StackFrame(StackFrameHelper stackFrameHelper, int skipFrames, bool need
private void BuildStackFrame(int skipFrames, bool needFileInfo)
{
- StackFrameHelper StackF = new StackFrameHelper(null);
+ StackFrameHelper StackF = new StackFrameHelper();
- StackF.InitializeSourceInfo(0, needFileInfo, null);
+ StackF.InitializeSourceInfo(needFileInfo, null);
int iNumOfFrames = StackF.GetNumberOfFrames();
diff --git a/src/coreclr/System.Private.CoreLib/src/System/Diagnostics/StackFrameHelper.cs b/src/coreclr/System.Private.CoreLib/src/System/Diagnostics/StackFrameHelper.cs
index 8e90b6a2eec..f10af9c2347 100644
--- a/src/coreclr/System.Private.CoreLib/src/System/Diagnostics/StackFrameHelper.cs
+++ b/src/coreclr/System.Private.CoreLib/src/System/Diagnostics/StackFrameHelper.cs
@@ -13,7 +13,6 @@ namespace System.Diagnostics
// VM\DebugDebugger.h. The binder will catch some of these layout problems.
internal sealed class StackFrameHelper
{
- private Thread? targetThread;
private int[]? rgiOffset;
private int[]? rgiILOffset;
@@ -48,9 +47,8 @@ private delegate void GetSourceLineInfoDelegate(Assembly? assembly, string assem
[ThreadStatic]
private static int t_reentrancy;
- public StackFrameHelper(Thread? target)
+ public StackFrameHelper()
{
- targetThread = target;
rgMethodHandle = null;
rgiMethodToken = null;
rgiOffset = null;
@@ -85,9 +83,9 @@ public StackFrameHelper(Thread? target)
// done by GetStackFramesInternal (on Windows for old PDB format).
//
- internal void InitializeSourceInfo(int iSkip, bool fNeedFileInfo, Exception? exception)
+ internal void InitializeSourceInfo(bool fNeedFileInfo, Exception? exception)
{
- StackTrace.GetStackFramesInternal(this, iSkip, fNeedFileInfo, exception);
+ StackTrace.GetStackFramesInternal(this, fNeedFileInfo, exception);
if (!fNeedFileInfo)
return;
diff --git a/src/coreclr/System.Private.CoreLib/src/System/Diagnostics/StackTrace.CoreCLR.cs b/src/coreclr/System.Private.CoreLib/src/System/Diagnostics/StackTrace.CoreCLR.cs
index 26e002f9901..5d4bc6008e6 100644
--- a/src/coreclr/System.Private.CoreLib/src/System/Diagnostics/StackTrace.CoreCLR.cs
+++ b/src/coreclr/System.Private.CoreLib/src/System/Diagnostics/StackTrace.CoreCLR.cs
@@ -3,13 +3,17 @@
using System.Reflection;
using System.Runtime.CompilerServices;
+using System.Runtime.InteropServices;
namespace System.Diagnostics
{
public partial class StackTrace
{
- [MethodImpl(MethodImplOptions.InternalCall)]
- internal static extern void GetStackFramesInternal(StackFrameHelper sfh, int iSkip, bool fNeedFileInfo, Exception? e);
+ [LibraryImport(RuntimeHelpers.QCall, EntryPoint = "StackTrace_GetStackFramesInternal")]
+ private static partial void GetStackFramesInternal(ObjectHandleOnStack sfh, [MarshalAs(UnmanagedType.Bool)] bool fNeedFileInfo, ObjectHandleOnStack e);
+
+ internal static void GetStackFramesInternal(StackFrameHelper sfh, bool fNeedFileInfo, Exception? e)
+ => GetStackFramesInternal(ObjectHandleOnStack.Create(ref sfh), fNeedFileInfo, ObjectHandleOnStack.Create(ref e));
internal static int CalculateFramesToSkip(StackFrameHelper StackF, int iNumFrames)
{
@@ -57,9 +61,9 @@ private void CaptureStackTrace(int skipFrames, bool fNeedFileInfo, Exception? e)
{
_methodsToSkip = skipFrames;
- StackFrameHelper StackF = new StackFrameHelper(null);
+ StackFrameHelper StackF = new StackFrameHelper();
- StackF.InitializeSourceInfo(0, fNeedFileInfo, e);
+ StackF.InitializeSourceInfo(fNeedFileInfo, e);
_numOfFrames = StackF.GetNumberOfFrames();
diff --git a/src/coreclr/System.Private.CoreLib/src/System/Exception.CoreCLR.cs b/src/coreclr/System.Private.CoreLib/src/System/Exception.CoreCLR.cs
index 79944b1cca8..1029f3e570e 100644
--- a/src/coreclr/System.Private.CoreLib/src/System/Exception.CoreCLR.cs
+++ b/src/coreclr/System.Private.CoreLib/src/System/Exception.CoreCLR.cs
@@ -117,9 +117,6 @@ internal void InternalPreserveStackTrace()
[MethodImpl(MethodImplOptions.InternalCall)]
private static extern void PrepareForForeignExceptionRaise();
- [MethodImpl(MethodImplOptions.InternalCall)]
- private static extern object? GetFrozenStackTrace(Exception exception);
-
[MethodImpl(MethodImplOptions.InternalCall)]
internal static extern uint GetExceptionCount();
@@ -226,9 +223,14 @@ public DispatchState(
}
}
+ [LibraryImport(RuntimeHelpers.QCall, EntryPoint = "ExceptionNative_GetFrozenStackTrace")]
+ private static partial void GetFrozenStackTrace(ObjectHandleOnStack exception, ObjectHandleOnStack stackTrace);
+
internal DispatchState CaptureDispatchState()
{
- object? stackTrace = GetFrozenStackTrace(this);
+ Exception _this = this;
+ object? stackTrace = null;
+ GetFrozenStackTrace(ObjectHandleOnStack.Create(ref _this), ObjectHandleOnStack.Create(ref stackTrace));
return new DispatchState(stackTrace,
_remoteStackTraceString, _ipForWatsonBuckets, _watsonBuckets);
diff --git a/src/coreclr/System.Private.CoreLib/src/System/GC.CoreCLR.cs b/src/coreclr/System.Private.CoreLib/src/System/GC.CoreCLR.cs
index 1bd94635a26..f8246642b9e 100644
--- a/src/coreclr/System.Private.CoreLib/src/System/GC.CoreCLR.cs
+++ b/src/coreclr/System.Private.CoreLib/src/System/GC.CoreCLR.cs
@@ -103,8 +103,8 @@ internal enum GC_ALLOC_FLAGS
GC_ALLOC_PINNED_OBJECT_HEAP = 64,
};
- [MethodImpl(MethodImplOptions.InternalCall)]
- internal static extern Array AllocateNewArray(IntPtr typeHandle, int length, GC_ALLOC_FLAGS flags);
+ [LibraryImport(RuntimeHelpers.QCall, EntryPoint = "GCInterface_AllocateNewArray")]
+ private static partial void AllocateNewArray(IntPtr typeHandlePtr, int length, GC_ALLOC_FLAGS flags, ObjectHandleOnStack ret);
[LibraryImport(RuntimeHelpers.QCall, EntryPoint = "GCInterface_GetTotalMemory")]
private static partial long GetTotalMemory();
@@ -791,16 +791,25 @@ public static unsafe T[] AllocateUninitializedArray(int length, bool pinned =
{
return new T[length];
}
-
#endif
}
- // Runtime overrides GC_ALLOC_ZEROING_OPTIONAL if the type contains references, so we don't need to worry about that.
- GC_ALLOC_FLAGS flags = GC_ALLOC_FLAGS.GC_ALLOC_ZEROING_OPTIONAL;
- if (pinned)
- flags |= GC_ALLOC_FLAGS.GC_ALLOC_PINNED_OBJECT_HEAP;
+ return AllocateNewArrayWorker(length, pinned);
+
+ [MethodImpl(MethodImplOptions.NoInlining)]
+ static T[] AllocateNewArrayWorker(int length, bool pinned)
+ {
+ // Runtime overrides GC_ALLOC_ZEROING_OPTIONAL if the type contains references, so we don't need to worry about that.
+ GC_ALLOC_FLAGS flags = GC_ALLOC_FLAGS.GC_ALLOC_ZEROING_OPTIONAL;
+ if (pinned)
+ {
+ flags |= GC_ALLOC_FLAGS.GC_ALLOC_PINNED_OBJECT_HEAP;
+ }
- return Unsafe.As(AllocateNewArray(RuntimeTypeHandle.ToIntPtr(typeof(T[]).TypeHandle), length, flags));
+ T[]? result = null;
+ AllocateNewArray(RuntimeTypeHandle.ToIntPtr(typeof(T[]).TypeHandle), length, flags, ObjectHandleOnStack.Create(ref result));
+ return result!;
+ }
}
///
@@ -818,7 +827,9 @@ public static T[] AllocateArray(int length, bool pinned = false) // T[] rathe
flags = GC_ALLOC_FLAGS.GC_ALLOC_PINNED_OBJECT_HEAP;
}
- return Unsafe.As(AllocateNewArray(RuntimeTypeHandle.ToIntPtr(typeof(T[]).TypeHandle), length, flags));
+ T[]? result = null;
+ AllocateNewArray(RuntimeTypeHandle.ToIntPtr(typeof(T[]).TypeHandle), length, flags, ObjectHandleOnStack.Create(ref result));
+ return result!;
}
[MethodImpl(MethodImplOptions.InternalCall)]
diff --git a/src/coreclr/System.Private.CoreLib/src/System/IO/Stream.CoreCLR.cs b/src/coreclr/System.Private.CoreLib/src/System/IO/Stream.CoreCLR.cs
new file mode 100644
index 00000000000..18e9ca018f4
--- /dev/null
+++ b/src/coreclr/System.Private.CoreLib/src/System/IO/Stream.CoreCLR.cs
@@ -0,0 +1,43 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System.Runtime.CompilerServices;
+using System.Runtime.InteropServices;
+
+namespace System.IO
+{
+ public abstract unsafe partial class Stream
+ {
+ [LibraryImport(RuntimeHelpers.QCall, EntryPoint = "Stream_HasOverriddenSlow")]
+ [return: MarshalAs(UnmanagedType.Bool)]
+ private static partial bool HasOverriddenSlow(MethodTable* pMT, [MarshalAs(UnmanagedType.Bool)] bool isRead);
+
+ private bool HasOverriddenBeginEndRead()
+ {
+ MethodTable* pMT = RuntimeHelpers.GetMethodTable(this);
+ bool res = pMT->AuxiliaryData->HasCheckedStreamOverride
+ ? pMT->AuxiliaryData->IsStreamOverriddenRead
+ : HasOverriddenReadSlow(pMT);
+ GC.KeepAlive(this);
+ return res;
+
+ [MethodImpl(MethodImplOptions.NoInlining)]
+ static bool HasOverriddenReadSlow(MethodTable* pMT)
+ => HasOverriddenSlow(pMT, isRead: true);
+ }
+
+ private bool HasOverriddenBeginEndWrite()
+ {
+ MethodTable* pMT = RuntimeHelpers.GetMethodTable(this);
+ bool res = pMT->AuxiliaryData->HasCheckedStreamOverride
+ ? pMT->AuxiliaryData->IsStreamOverriddenWrite
+ : HasOverriddenWriteSlow(pMT);
+ GC.KeepAlive(this);
+ return res;
+
+ [MethodImpl(MethodImplOptions.NoInlining)]
+ static bool HasOverriddenWriteSlow(MethodTable* pMT)
+ => HasOverriddenSlow(pMT, isRead: false);
+ }
+ }
+}
diff --git a/src/coreclr/System.Private.CoreLib/src/System/Object.CoreCLR.cs b/src/coreclr/System.Private.CoreLib/src/System/Object.CoreCLR.cs
index 940d1622bad..ce1c810d8bc 100644
--- a/src/coreclr/System.Private.CoreLib/src/System/Object.CoreCLR.cs
+++ b/src/coreclr/System.Private.CoreLib/src/System/Object.CoreCLR.cs
@@ -3,15 +3,32 @@
using System.Diagnostics;
using System.Runtime.CompilerServices;
+using System.Runtime.InteropServices;
namespace System
{
public partial class Object
{
+ [LibraryImport(RuntimeHelpers.QCall, EntryPoint = "ObjectNative_GetTypeSlow")]
+ private static unsafe partial void GetTypeSlow(MethodTable* methodTable, ObjectHandleOnStack ret);
+
// Returns a Type object which represent this object instance.
[Intrinsic]
- [MethodImpl(MethodImplOptions.InternalCall)]
- public extern Type GetType();
+ public unsafe Type GetType()
+ {
+ MethodTable* pMT = RuntimeHelpers.GetMethodTable(this);
+ Type type = pMT->AuxiliaryData->ExposedClassObject ?? GetTypeWorker(pMT);
+ GC.KeepAlive(this);
+ return type;
+
+ [MethodImpl(MethodImplOptions.NoInlining)]
+ static Type GetTypeWorker(MethodTable* pMT)
+ {
+ Type? type = null;
+ GetTypeSlow(pMT, ObjectHandleOnStack.Create(ref type));
+ return type!;
+ }
+ }
// Returns a new object instance that is a memberwise copy of this
// object. This is always a shallow copy of the instance. The method is protected
diff --git a/src/coreclr/System.Private.CoreLib/src/System/Reflection/Metadata/MetadataUpdater.cs b/src/coreclr/System.Private.CoreLib/src/System/Reflection/Metadata/MetadataUpdater.cs
index ff1c5b85503..413a970afcf 100644
--- a/src/coreclr/System.Private.CoreLib/src/System/Reflection/Metadata/MetadataUpdater.cs
+++ b/src/coreclr/System.Private.CoreLib/src/System/Reflection/Metadata/MetadataUpdater.cs
@@ -2,6 +2,7 @@
// The .NET Foundation licenses this file to you under the MIT license.
using System.Diagnostics;
+using System.Diagnostics.CodeAnalysis;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
@@ -59,6 +60,7 @@ public static void ApplyUpdate(Assembly assembly, ReadOnlySpan metadataDel
///
/// Returns true if the apply assembly update is enabled and available.
///
+ [FeatureSwitchDefinition("System.Reflection.Metadata.MetadataUpdater.IsSupported")]
public static bool IsSupported { get; } = IsApplyUpdateSupported();
}
}
diff --git a/src/coreclr/System.Private.CoreLib/src/System/Reflection/RuntimeAssembly.cs b/src/coreclr/System.Private.CoreLib/src/System/Reflection/RuntimeAssembly.cs
index ccd3d9aa9ef..95bcf065264 100644
--- a/src/coreclr/System.Private.CoreLib/src/System/Reflection/RuntimeAssembly.cs
+++ b/src/coreclr/System.Private.CoreLib/src/System/Reflection/RuntimeAssembly.cs
@@ -115,8 +115,6 @@ public override string? CodeBase
}
}
- internal RuntimeAssembly GetNativeHandle() => this;
-
// If the assembly is copied before it is loaded, the codebase will be set to the
// actual file loaded if copiedName is true. If it is false, then the original code base
// is returned.
@@ -263,7 +261,7 @@ public override Type[] GetExportedTypes()
public override IEnumerable DefinedTypes
{
[RequiresUnreferencedCode("Types might be removed")]
- get => GetManifestModule(GetNativeHandle()).GetDefinedTypes();
+ get => GetManifestModule().GetDefinedTypes();
}
[LibraryImport(RuntimeHelpers.QCall, EntryPoint = "AssemblyNative_GetIsCollectible")]
@@ -324,7 +322,7 @@ public override void GetObjectData(SerializationInfo info, StreamingContext cont
public override Module ManifestModule =>
// We don't need to return the "external" ModuleBuilder because
// it is meant to be read-only
- GetManifestModule(GetNativeHandle());
+ GetManifestModule();
public override object[] GetCustomAttributes(bool inherit)
{
@@ -586,9 +584,17 @@ private CultureInfo GetLocale()
}
[MethodImpl(MethodImplOptions.InternalCall)]
- private static extern bool FCallIsDynamic(RuntimeAssembly assembly);
+ private static extern bool GetIsDynamic(IntPtr assembly);
- public override bool IsDynamic => FCallIsDynamic(GetNativeHandle());
+ public override bool IsDynamic
+ {
+ get
+ {
+ bool isDynamic = GetIsDynamic(GetUnderlyingNativeHandle());
+ GC.KeepAlive(this); // We directly pass the native handle above - make sure this object stays alive for the call
+ return isDynamic;
+ }
+ }
[LibraryImport(RuntimeHelpers.QCall, EntryPoint = "AssemblyNative_GetSimpleName")]
private static partial void GetSimpleName(QCallAssembly assembly, StringHandleOnStack retSimpleName);
@@ -701,8 +707,24 @@ public override Module[] GetLoadedModules(bool getResourceModules)
return GetModulesInternal(false, getResourceModules);
}
+ private RuntimeModule GetManifestModule()
+ {
+ return GetManifestModule(this) ?? GetManifestModuleWorker(this);
+
+ [MethodImpl(MethodImplOptions.NoInlining)]
+ static RuntimeModule GetManifestModuleWorker(RuntimeAssembly assembly)
+ {
+ RuntimeModule? module = null;
+ GetManifestModuleSlow(ObjectHandleOnStack.Create(ref assembly), ObjectHandleOnStack.Create(ref module));
+ return module!;
+ }
+ }
+
[MethodImpl(MethodImplOptions.InternalCall)]
- internal static extern RuntimeModule GetManifestModule(RuntimeAssembly assembly);
+ private static extern RuntimeModule? GetManifestModule(RuntimeAssembly assembly);
+
+ [LibraryImport(RuntimeHelpers.QCall, EntryPoint = "AssemblyHandle_GetManifestModuleSlow")]
+ private static partial void GetManifestModuleSlow(ObjectHandleOnStack assembly, ObjectHandleOnStack module);
[MethodImpl(MethodImplOptions.InternalCall)]
internal static extern int GetToken(RuntimeAssembly assembly);
@@ -713,7 +735,7 @@ public sealed override Type[] GetForwardedTypes()
List types = new List();
List exceptions = new List();
- MetadataImport scope = GetManifestModule(GetNativeHandle()).MetadataImport;
+ MetadataImport scope = GetManifestModule().MetadataImport;
scope.Enum(MetadataTokenType.ExportedType, 0, out MetadataEnumResult enumResult);
RuntimeAssembly runtimeAssembly = this;
QCallAssembly pAssembly = new QCallAssembly(ref runtimeAssembly);
diff --git a/src/coreclr/System.Private.CoreLib/src/System/Reflection/RuntimeCustomAttributeData.cs b/src/coreclr/System.Private.CoreLib/src/System/Reflection/RuntimeCustomAttributeData.cs
index a610a5cbe3e..2ac866f246f 100644
--- a/src/coreclr/System.Private.CoreLib/src/System/Reflection/RuntimeCustomAttributeData.cs
+++ b/src/coreclr/System.Private.CoreLib/src/System/Reflection/RuntimeCustomAttributeData.cs
@@ -82,7 +82,7 @@ internal static IList GetCustomAttributesInternal(RuntimeAs
// No pseudo attributes for RuntimeAssembly
- return GetCustomAttributes((RuntimeModule)target.ManifestModule, RuntimeAssembly.GetToken(target.GetNativeHandle()));
+ return GetCustomAttributes((RuntimeModule)target.ManifestModule, RuntimeAssembly.GetToken(target));
}
internal static IList GetCustomAttributesInternal(RuntimeParameterInfo target)
@@ -1227,7 +1227,7 @@ internal static bool IsDefined(RuntimeAssembly assembly, RuntimeType caType)
Debug.Assert(caType is not null);
// No pseudo attributes for RuntimeAssembly
- return IsCustomAttributeDefined((assembly.ManifestModule as RuntimeModule)!, RuntimeAssembly.GetToken(assembly.GetNativeHandle()), caType);
+ return IsCustomAttributeDefined((assembly.ManifestModule as RuntimeModule)!, RuntimeAssembly.GetToken(assembly), caType);
}
internal static bool IsDefined(RuntimeModule module, RuntimeType caType)
@@ -1388,7 +1388,7 @@ internal static object[] GetCustomAttributes(RuntimeAssembly assembly, RuntimeTy
// No pseudo attributes for RuntimeAssembly
- int assemblyToken = RuntimeAssembly.GetToken(assembly.GetNativeHandle());
+ int assemblyToken = RuntimeAssembly.GetToken(assembly);
return GetCustomAttributes((assembly.ManifestModule as RuntimeModule)!, assemblyToken, 0, caType);
}
diff --git a/src/coreclr/System.Private.CoreLib/src/System/Runtime/CompilerServices/ICastableHelpers.cs b/src/coreclr/System.Private.CoreLib/src/System/Runtime/CompilerServices/ICastableHelpers.cs
deleted file mode 100644
index ca9f73a5c66..00000000000
--- a/src/coreclr/System.Private.CoreLib/src/System/Runtime/CompilerServices/ICastableHelpers.cs
+++ /dev/null
@@ -1,25 +0,0 @@
-// Licensed to the .NET Foundation under one or more agreements.
-// The .NET Foundation licenses this file to you under the MIT license.
-
-using System.Diagnostics.CodeAnalysis;
-
-namespace System.Runtime.CompilerServices
-{
- ///
- /// Helpers that allows VM to call into ICastable methods without having to deal with RuntimeTypeHandle.
- /// RuntimeTypeHandle is a struct and is always passed in stack in x86, which our VM call helpers don't
- /// particularly like.
- ///
- internal static class ICastableHelpers
- {
- internal static bool IsInstanceOfInterface(ICastable castable, RuntimeType type, [NotNullWhen(true)] out Exception? castError)
- {
- return castable.IsInstanceOfInterface(new RuntimeTypeHandle(type), out castError);
- }
-
- internal static RuntimeType GetImplType(ICastable castable, RuntimeType interfaceType)
- {
- return castable.GetImplType(new RuntimeTypeHandle(interfaceType)).GetRuntimeType();
- }
- }
-}
diff --git a/src/coreclr/System.Private.CoreLib/src/System/Runtime/CompilerServices/RuntimeHelpers.CoreCLR.cs b/src/coreclr/System.Private.CoreLib/src/System/Runtime/CompilerServices/RuntimeHelpers.CoreCLR.cs
index e155b109886..77311a02421 100644
--- a/src/coreclr/System.Private.CoreLib/src/System/Runtime/CompilerServices/RuntimeHelpers.CoreCLR.cs
+++ b/src/coreclr/System.Private.CoreLib/src/System/Runtime/CompilerServices/RuntimeHelpers.CoreCLR.cs
@@ -220,19 +220,35 @@ public static unsafe void PrepareMethod(RuntimeMethodHandle method, RuntimeTypeH
[MethodImpl(MethodImplOptions.InternalCall)]
public static extern void PrepareDelegate(Delegate d);
- [MethodImpl(MethodImplOptions.InternalCall)]
- public static extern int GetHashCode(object? o);
-
///
/// If a hash code has been assigned to the object, it is returned. Otherwise zero is
/// returned.
///
- ///
- /// The advantage of this over is that it avoids assigning a hash
- /// code to the object if it does not already have one.
- ///
[MethodImpl(MethodImplOptions.InternalCall)]
- internal static extern int TryGetHashCode(object o);
+ internal static extern int TryGetHashCode(object? o);
+
+ [LibraryImport(QCall, EntryPoint = "ObjectNative_GetHashCodeSlow")]
+ private static partial int GetHashCodeSlow(ObjectHandleOnStack o);
+
+ public static int GetHashCode(object? o)
+ {
+ int hashCode = TryGetHashCode(o);
+ if (hashCode == 0)
+ {
+ return GetHashCodeWorker(o);
+ }
+ return hashCode;
+
+ [MethodImpl(MethodImplOptions.NoInlining)]
+ static int GetHashCodeWorker(object? o)
+ {
+ if (o is null)
+ {
+ return 0;
+ }
+ return GetHashCodeSlow(ObjectHandleOnStack.Create(ref o));
+ }
+ }
public static new unsafe bool Equals(object? o1, object? o2)
{
@@ -344,6 +360,11 @@ internal static int EnumCompareTo(T x, T y) where T : struct, Enum
return x.CompareTo(y);
}
+ // The body of this function will be created by the EE for the specific type.
+ // See getILIntrinsicImplementation for how this happens.
+ [Intrinsic]
+ internal static extern unsafe void CopyConstruct(T* dest, T* src) where T : unmanaged;
+
internal static ref byte GetRawData(this object obj) =>
ref Unsafe.As(obj).Data;
@@ -420,16 +441,8 @@ internal static unsafe bool ObjectHasComponentSize(object obj)
//
// GC.KeepAlive(o);
//
- [MethodImpl(MethodImplOptions.AggressiveInlining)]
[Intrinsic]
- internal static unsafe MethodTable* GetMethodTable(object obj)
- {
- // The body of this function will be replaced by the EE with unsafe code
- // See getILIntrinsicImplementationForRuntimeHelpers for how this happens.
-
- return (MethodTable*)Unsafe.Add(ref Unsafe.As(ref obj.GetRawData()), -1);
- }
-
+ internal static unsafe MethodTable* GetMethodTable(object obj) => GetMethodTable(obj);
[LibraryImport(QCall, EntryPoint = "MethodTable_AreTypesEquivalent")]
[return: MarshalAs(UnmanagedType.Bool)]
@@ -666,7 +679,6 @@ internal unsafe struct MethodTable
// Types that require non-trivial interface cast have this bit set in the category
private const uint enum_flag_NonTrivialInterfaceCast = 0x00080000 // enum_flag_Category_Array
| 0x40000000 // enum_flag_ComObject
- | 0x00400000 // enum_flag_ICastable;
| 0x10000000 // enum_flag_IDynamicInterfaceCastable;
| 0x00040000; // enum_flag_Category_ValueType
@@ -791,15 +803,20 @@ public TypeHandle GetArrayElementTypeHandle()
}
// Subset of src\vm\methodtable.h
- [StructLayout(LayoutKind.Explicit)]
+ [StructLayout(LayoutKind.Sequential)]
internal unsafe struct MethodTableAuxiliaryData
{
- [FieldOffset(0)]
private uint Flags;
+ private void* LoaderModule;
+ private nint ExposedClassObjectRaw;
private const uint enum_flag_HasCheckedCanCompareBitsOrUseFastGetHashCode = 0x0002; // Whether we have checked the overridden Equals or GetHashCode
private const uint enum_flag_CanCompareBitsOrUseFastGetHashCode = 0x0004; // Is any field type or sub field type overridden Equals or GetHashCode
+ private const uint enum_flag_HasCheckedStreamOverride = 0x0400;
+ private const uint enum_flag_StreamOverriddenRead = 0x0800;
+ private const uint enum_flag_StreamOverriddenWrite = 0x1000;
+
public bool HasCheckedCanCompareBitsOrUseFastGetHashCode => (Flags & enum_flag_HasCheckedCanCompareBitsOrUseFastGetHashCode) != 0;
public bool CanCompareBitsOrUseFastGetHashCode
@@ -810,6 +827,34 @@ public bool CanCompareBitsOrUseFastGetHashCode
return (Flags & enum_flag_CanCompareBitsOrUseFastGetHashCode) != 0;
}
}
+
+ public bool HasCheckedStreamOverride => (Flags & enum_flag_HasCheckedStreamOverride) != 0;
+
+ public bool IsStreamOverriddenRead
+ {
+ get
+ {
+ Debug.Assert(HasCheckedStreamOverride);
+ return (Flags & enum_flag_StreamOverriddenRead) != 0;
+ }
+ }
+
+ public bool IsStreamOverriddenWrite
+ {
+ get
+ {
+ Debug.Assert(HasCheckedStreamOverride);
+ return (Flags & enum_flag_StreamOverriddenWrite) != 0;
+ }
+ }
+
+ public RuntimeType? ExposedClassObject
+ {
+ get
+ {
+ return *(RuntimeType*)Unsafe.AsPointer(ref ExposedClassObjectRaw);
+ }
+ }
}
///
diff --git a/src/coreclr/System.Private.CoreLib/src/System/Runtime/InteropServices/ComTypes/IEnumerable.cs b/src/coreclr/System.Private.CoreLib/src/System/Runtime/InteropServices/ComTypes/IEnumerable.cs
deleted file mode 100644
index b3ab4f4056b..00000000000
--- a/src/coreclr/System.Private.CoreLib/src/System/Runtime/InteropServices/ComTypes/IEnumerable.cs
+++ /dev/null
@@ -1,20 +0,0 @@
-// Licensed to the .NET Foundation under one or more agreements.
-// The .NET Foundation licenses this file to you under the MIT license.
-
-namespace System.Runtime.InteropServices.ComTypes
-{
- /*==========================================================================
- ** Interface: IEnumerable
- ** Purpose:
- ** This interface is redefined here since the original IEnumerable interface
- ** has all its methods marked as ecall's since it is a managed standard
- ** interface. This interface is used from within the runtime to make a call
- ** on the COM server directly when it implements the IEnumerable interface.
- ==========================================================================*/
- [Guid("496B0ABE-CDEE-11d3-88E8-00902754C43A")]
- internal interface IEnumerable
- {
- [DispId(-4)]
- Collections.IEnumerator GetEnumerator();
- }
-}
diff --git a/src/coreclr/System.Private.CoreLib/src/System/Runtime/InteropServices/ComTypes/IEnumerator.cs b/src/coreclr/System.Private.CoreLib/src/System/Runtime/InteropServices/ComTypes/IEnumerator.cs
deleted file mode 100644
index 1e07dcc6ded..00000000000
--- a/src/coreclr/System.Private.CoreLib/src/System/Runtime/InteropServices/ComTypes/IEnumerator.cs
+++ /dev/null
@@ -1,13 +0,0 @@
-// Licensed to the .NET Foundation under one or more agreements.
-// The .NET Foundation licenses this file to you under the MIT license.
-
-namespace System.Runtime.InteropServices.ComTypes
-{
- [Guid("496B0ABF-CDEE-11d3-88E8-00902754C43A")]
- internal interface IEnumerator
- {
- bool MoveNext();
- object Current { get; }
- void Reset();
- }
-}
diff --git a/src/coreclr/System.Private.CoreLib/src/System/Runtime/InteropServices/CustomMarshalers/EnumerableToDispatchMarshaler.cs b/src/coreclr/System.Private.CoreLib/src/System/Runtime/InteropServices/CustomMarshalers/EnumerableToDispatchMarshaler.cs
deleted file mode 100644
index 354e4b66545..00000000000
--- a/src/coreclr/System.Private.CoreLib/src/System/Runtime/InteropServices/CustomMarshalers/EnumerableToDispatchMarshaler.cs
+++ /dev/null
@@ -1,51 +0,0 @@
-// Licensed to the .NET Foundation under one or more agreements.
-// The .NET Foundation licenses this file to you under the MIT license.
-
-using System.Collections;
-using System.Runtime.Versioning;
-
-namespace System.Runtime.InteropServices.CustomMarshalers
-{
- [SupportedOSPlatform("windows")]
- internal sealed class EnumerableToDispatchMarshaler : ICustomMarshaler
- {
- private static readonly EnumerableToDispatchMarshaler s_enumerableToDispatchMarshaler = new EnumerableToDispatchMarshaler();
-
- public static ICustomMarshaler GetInstance(string? cookie) => s_enumerableToDispatchMarshaler;
-
- private EnumerableToDispatchMarshaler()
- {
- }
-
- public void CleanUpManagedData(object ManagedObj)
- {
- }
-
- public void CleanUpNativeData(IntPtr pNativeData)
- {
- Marshal.Release(pNativeData);
- }
-
- public int GetNativeDataSize()
- {
- // Return -1 to indicate the managed type this marshaler handles is not a value type.
- return -1;
- }
-
- public IntPtr MarshalManagedToNative(object ManagedObj)
- {
- ArgumentNullException.ThrowIfNull(ManagedObj);
-
- return Marshal.GetComInterfaceForObject