diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml
index 27a98cc5969..6de4a35b7f5 100644
--- a/eng/Version.Details.xml
+++ b/eng/Version.Details.xml
@@ -1,210 +1,210 @@
-
- https://github.com/dotnet/runtime
- 77115c33ec4fb8688e4803cd8f76cd0fa4bff7d7
+
+ https://github.com/dotnet/dotnet
+ 0b60d265d30d96c827b73dbce74cb78f600d3c92
-
- https://github.com/dotnet/runtime
- 77115c33ec4fb8688e4803cd8f76cd0fa4bff7d7
+
+ https://github.com/dotnet/dotnet
+ 0b60d265d30d96c827b73dbce74cb78f600d3c92
-
- https://github.com/dotnet/runtime
- 77115c33ec4fb8688e4803cd8f76cd0fa4bff7d7
+
+ https://github.com/dotnet/dotnet
+ 0b60d265d30d96c827b73dbce74cb78f600d3c92
-
- https://github.com/dotnet/runtime
- 77115c33ec4fb8688e4803cd8f76cd0fa4bff7d7
+
+ https://github.com/dotnet/dotnet
+ 0b60d265d30d96c827b73dbce74cb78f600d3c92
-
- https://github.com/dotnet/runtime
- 77115c33ec4fb8688e4803cd8f76cd0fa4bff7d7
+
+ https://github.com/dotnet/dotnet
+ 0b60d265d30d96c827b73dbce74cb78f600d3c92
-
- https://github.com/dotnet/runtime
- 77115c33ec4fb8688e4803cd8f76cd0fa4bff7d7
+
+ https://github.com/dotnet/dotnet
+ 0b60d265d30d96c827b73dbce74cb78f600d3c92
-
- https://github.com/dotnet/runtime
- 77115c33ec4fb8688e4803cd8f76cd0fa4bff7d7
+
+ https://github.com/dotnet/dotnet
+ 0b60d265d30d96c827b73dbce74cb78f600d3c92
-
- https://github.com/dotnet/runtime
- 77115c33ec4fb8688e4803cd8f76cd0fa4bff7d7
+
+ https://github.com/dotnet/dotnet
+ 0b60d265d30d96c827b73dbce74cb78f600d3c92
-
- https://github.com/dotnet/runtime
- 77115c33ec4fb8688e4803cd8f76cd0fa4bff7d7
+
+ https://github.com/dotnet/dotnet
+ 0b60d265d30d96c827b73dbce74cb78f600d3c92
-
- https://github.com/dotnet/runtime
- 77115c33ec4fb8688e4803cd8f76cd0fa4bff7d7
+
+ https://github.com/dotnet/dotnet
+ 0b60d265d30d96c827b73dbce74cb78f600d3c92
-
- https://github.com/dotnet/runtime
- 77115c33ec4fb8688e4803cd8f76cd0fa4bff7d7
+
+ https://github.com/dotnet/dotnet
+ 0b60d265d30d96c827b73dbce74cb78f600d3c92
-
- https://github.com/dotnet/runtime
- 77115c33ec4fb8688e4803cd8f76cd0fa4bff7d7
+
+ https://github.com/dotnet/dotnet
+ 0b60d265d30d96c827b73dbce74cb78f600d3c92
-
- https://github.com/dotnet/runtime
- 77115c33ec4fb8688e4803cd8f76cd0fa4bff7d7
+
+ https://github.com/dotnet/dotnet
+ 0b60d265d30d96c827b73dbce74cb78f600d3c92
-
- https://github.com/dotnet/runtime
- 77115c33ec4fb8688e4803cd8f76cd0fa4bff7d7
+
+ https://github.com/dotnet/dotnet
+ 0b60d265d30d96c827b73dbce74cb78f600d3c92
-
- https://github.com/dotnet/runtime
- 77115c33ec4fb8688e4803cd8f76cd0fa4bff7d7
+
+ https://github.com/dotnet/dotnet
+ 0b60d265d30d96c827b73dbce74cb78f600d3c92
-
- https://github.com/dotnet/runtime
- 77115c33ec4fb8688e4803cd8f76cd0fa4bff7d7
+
+ https://github.com/dotnet/dotnet
+ 0b60d265d30d96c827b73dbce74cb78f600d3c92
-
- https://github.com/dotnet/runtime
- 77115c33ec4fb8688e4803cd8f76cd0fa4bff7d7
+
+ https://github.com/dotnet/dotnet
+ 0b60d265d30d96c827b73dbce74cb78f600d3c92
-
- https://github.com/dotnet/runtime
- 77115c33ec4fb8688e4803cd8f76cd0fa4bff7d7
+
+ https://github.com/dotnet/dotnet
+ 0b60d265d30d96c827b73dbce74cb78f600d3c92
-
- https://github.com/dotnet/runtime
- 77115c33ec4fb8688e4803cd8f76cd0fa4bff7d7
+
+ https://github.com/dotnet/dotnet
+ 0b60d265d30d96c827b73dbce74cb78f600d3c92
-
- https://github.com/dotnet/runtime
- 77115c33ec4fb8688e4803cd8f76cd0fa4bff7d7
+
+ https://github.com/dotnet/dotnet
+ 0b60d265d30d96c827b73dbce74cb78f600d3c92
-
- https://github.com/dotnet/runtime
- 77115c33ec4fb8688e4803cd8f76cd0fa4bff7d7
+
+ https://github.com/dotnet/dotnet
+ 0b60d265d30d96c827b73dbce74cb78f600d3c92
-
- https://github.com/dotnet/runtime
- 77115c33ec4fb8688e4803cd8f76cd0fa4bff7d7
+
+ https://github.com/dotnet/dotnet
+ 0b60d265d30d96c827b73dbce74cb78f600d3c92
-
- https://github.com/dotnet/runtime
- 77115c33ec4fb8688e4803cd8f76cd0fa4bff7d7
+
+ https://github.com/dotnet/dotnet
+ 0b60d265d30d96c827b73dbce74cb78f600d3c92
-
- https://github.com/dotnet/runtime
- 77115c33ec4fb8688e4803cd8f76cd0fa4bff7d7
+
+ https://github.com/dotnet/dotnet
+ 0b60d265d30d96c827b73dbce74cb78f600d3c92
-
- https://github.com/dotnet/runtime
- 77115c33ec4fb8688e4803cd8f76cd0fa4bff7d7
+
+ https://github.com/dotnet/dotnet
+ 0b60d265d30d96c827b73dbce74cb78f600d3c92
-
- https://github.com/dotnet/runtime
- 77115c33ec4fb8688e4803cd8f76cd0fa4bff7d7
+
+ https://github.com/dotnet/dotnet
+ 0b60d265d30d96c827b73dbce74cb78f600d3c92
-
- https://github.com/dotnet/runtime
- 77115c33ec4fb8688e4803cd8f76cd0fa4bff7d7
+
+ https://github.com/dotnet/dotnet
+ 0b60d265d30d96c827b73dbce74cb78f600d3c92
-
- https://github.com/dotnet/runtime
- 77115c33ec4fb8688e4803cd8f76cd0fa4bff7d7
+
+ https://github.com/dotnet/dotnet
+ 0b60d265d30d96c827b73dbce74cb78f600d3c92
-
- https://github.com/dotnet/runtime
- 77115c33ec4fb8688e4803cd8f76cd0fa4bff7d7
+
+ https://github.com/dotnet/dotnet
+ 0b60d265d30d96c827b73dbce74cb78f600d3c92
-
- https://github.com/dotnet/runtime
- 77115c33ec4fb8688e4803cd8f76cd0fa4bff7d7
+
+ https://github.com/dotnet/dotnet
+ 0b60d265d30d96c827b73dbce74cb78f600d3c92
-
- https://github.com/dotnet/runtime
- 77115c33ec4fb8688e4803cd8f76cd0fa4bff7d7
+
+ https://github.com/dotnet/dotnet
+ 0b60d265d30d96c827b73dbce74cb78f600d3c92
-
- https://github.com/dotnet/runtime
- 77115c33ec4fb8688e4803cd8f76cd0fa4bff7d7
+
+ https://github.com/dotnet/dotnet
+ 0b60d265d30d96c827b73dbce74cb78f600d3c92
-
- https://github.com/dotnet/runtime
- 77115c33ec4fb8688e4803cd8f76cd0fa4bff7d7
+
+ https://github.com/dotnet/dotnet
+ 0b60d265d30d96c827b73dbce74cb78f600d3c92
-
- https://github.com/dotnet/runtime
- 77115c33ec4fb8688e4803cd8f76cd0fa4bff7d7
+
+ https://github.com/dotnet/dotnet
+ 0b60d265d30d96c827b73dbce74cb78f600d3c92
-
- https://github.com/dotnet/runtime
- 77115c33ec4fb8688e4803cd8f76cd0fa4bff7d7
+
+ https://github.com/dotnet/dotnet
+ 0b60d265d30d96c827b73dbce74cb78f600d3c92
-
- https://github.com/dotnet/runtime
- 77115c33ec4fb8688e4803cd8f76cd0fa4bff7d7
+
+ https://github.com/dotnet/dotnet
+ 0b60d265d30d96c827b73dbce74cb78f600d3c92
-
- https://github.com/dotnet/runtime
- 77115c33ec4fb8688e4803cd8f76cd0fa4bff7d7
+
+ https://github.com/dotnet/dotnet
+ 0b60d265d30d96c827b73dbce74cb78f600d3c92
-
- https://github.com/dotnet/aspnetcore
- 7f26caa86f90cb81efa98839c57417ef3d4d8154
+
+ https://github.com/dotnet/dotnet
+ 0b60d265d30d96c827b73dbce74cb78f600d3c92
-
- https://github.com/dotnet/aspnetcore
- 7f26caa86f90cb81efa98839c57417ef3d4d8154
+
+ https://github.com/dotnet/dotnet
+ 0b60d265d30d96c827b73dbce74cb78f600d3c92
-
- https://github.com/dotnet/aspnetcore
- 7f26caa86f90cb81efa98839c57417ef3d4d8154
+
+ https://github.com/dotnet/dotnet
+ 0b60d265d30d96c827b73dbce74cb78f600d3c92
-
- https://github.com/dotnet/aspnetcore
- 7f26caa86f90cb81efa98839c57417ef3d4d8154
+
+ https://github.com/dotnet/dotnet
+ 0b60d265d30d96c827b73dbce74cb78f600d3c92
-
- https://github.com/dotnet/aspnetcore
- 7f26caa86f90cb81efa98839c57417ef3d4d8154
+
+ https://github.com/dotnet/dotnet
+ 0b60d265d30d96c827b73dbce74cb78f600d3c92
-
- https://github.com/dotnet/aspnetcore
- 7f26caa86f90cb81efa98839c57417ef3d4d8154
+
+ https://github.com/dotnet/dotnet
+ 0b60d265d30d96c827b73dbce74cb78f600d3c92
-
- https://github.com/dotnet/aspnetcore
- 7f26caa86f90cb81efa98839c57417ef3d4d8154
+
+ https://github.com/dotnet/dotnet
+ 0b60d265d30d96c827b73dbce74cb78f600d3c92
-
- https://github.com/dotnet/aspnetcore
- 7f26caa86f90cb81efa98839c57417ef3d4d8154
+
+ https://github.com/dotnet/dotnet
+ 0b60d265d30d96c827b73dbce74cb78f600d3c92
-
- https://github.com/dotnet/aspnetcore
- 7f26caa86f90cb81efa98839c57417ef3d4d8154
+
+ https://github.com/dotnet/dotnet
+ 0b60d265d30d96c827b73dbce74cb78f600d3c92
-
- https://github.com/dotnet/efcore
-
+
+ https://github.com/dotnet/dotnet
+ 0b60d265d30d96c827b73dbce74cb78f600d3c92
-
- https://github.com/dotnet/arcade
- 6d4b01bce3a29c172faff5abc0bfe2ae3d1fef3d
+
+ https://github.com/dotnet/dotnet
+ 0b60d265d30d96c827b73dbce74cb78f600d3c92
-
- https://github.com/dotnet/arcade
- 6d4b01bce3a29c172faff5abc0bfe2ae3d1fef3d
+
+ https://github.com/dotnet/dotnet
+ 0b60d265d30d96c827b73dbce74cb78f600d3c92
-
- https://github.com/dotnet/arcade
-
+
+ https://github.com/dotnet/dotnet
+ 0b60d265d30d96c827b73dbce74cb78f600d3c92
diff --git a/eng/Versions.props b/eng/Versions.props
index 225d9e6d511..ecff850cdac 100644
--- a/eng/Versions.props
+++ b/eng/Versions.props
@@ -27,56 +27,56 @@
-->
- 10.0.0-preview.3.25128.5
- 10.0.0-preview.3.25128.5
- 10.0.0-preview.3.25128.5
- 10.0.0-preview.3.25128.5
- 10.0.0-preview.3.25128.5
- 10.0.0-preview.3.25128.5
- 10.0.0-preview.3.25128.5
- 10.0.0-preview.3.25128.5
- 10.0.0-preview.3.25128.5
- 10.0.0-preview.3.25128.5
- 10.0.0-preview.3.25128.5
- 10.0.0-preview.3.25128.5
- 10.0.0-preview.3.25128.5
- 10.0.0-preview.3.25128.5
- 10.0.0-preview.3.25128.5
- 10.0.0-preview.3.25128.5
- 10.0.0-preview.3.25128.5
- 10.0.0-preview.3.25128.5
- 10.0.0-preview.3.25128.5
- 10.0.0-preview.3.25128.5
- 10.0.0-preview.3.25128.5
- 10.0.0-preview.3.25128.5
- 10.0.0-preview.3.25128.5
- 10.0.0-preview.3.25128.5
- 10.0.0-preview.3.25128.5
- 10.0.0-preview.3.25128.5
- 10.0.0-preview.3.25128.5
- 10.0.0-preview.3.25128.5
- 10.0.0-preview.3.25128.5
- 10.0.0-preview.3.25128.5
- 10.0.0-preview.3.25128.5
- 10.0.0-preview.3.25128.5
- 10.0.0-preview.3.25128.5
- 10.0.0-preview.3.25128.5
- 10.0.0-preview.3.25128.5
- 10.0.0-preview.3.25128.5
+ 10.0.0-rtm.25512.102
+ 10.0.0-rtm.25512.102
+ 10.0.0-rtm.25512.102
+ 10.0.0-rtm.25512.102
+ 10.0.0-rtm.25512.102
+ 10.0.0-rtm.25512.102
+ 10.0.0-rtm.25512.102
+ 10.0.0-rtm.25512.102
+ 10.0.0-rtm.25512.102
+ 10.0.0-rtm.25512.102
+ 10.0.0-rtm.25512.102
+ 10.0.0-rtm.25512.102
+ 10.0.0-rtm.25512.102
+ 10.0.0-rtm.25512.102
+ 10.0.0-rtm.25512.102
+ 10.0.0-rtm.25512.102
+ 10.0.0-rtm.25512.102
+ 10.0.0-rtm.25512.102
+ 10.0.0-rtm.25512.102
+ 10.0.0-rtm.25512.102
+ 10.0.0-rtm.25512.102
+ 10.0.0-rtm.25512.102
+ 10.0.0-rtm.25512.102
+ 10.0.0-rtm.25512.102
+ 10.0.0-rtm.25512.102
+ 10.0.0-rtm.25512.102
+ 10.0.0-rtm.25512.102
+ 10.0.0-rtm.25512.102
+ 10.0.0-rtm.25512.102
+ 10.0.0-rtm.25512.102
+ 10.0.0-rtm.25512.102
+ 10.0.0-rtm.25512.102
+ 10.0.0-rtm.25512.102
+ 10.0.0-rtm.25512.102
+ 10.0.0-rtm.25512.102
+ 10.0.0-rtm.25512.102
- 10.0.0-preview.3.25151.1
- 10.0.0-preview.3.25151.1
- 10.0.0-preview.3.25151.1
- 10.0.0-preview.3.25151.1
- 10.0.0-preview.3.25151.1
- 10.0.0-preview.3.25151.1
- 10.0.0-preview.3.25151.1
- 10.0.0-preview.3.25151.1
- 10.0.0-preview.3.25151.1
+ 10.0.0-rtm.25512.102
+ 10.0.0-rtm.25512.102
+ 10.0.0-rtm.25512.102
+ 10.0.0-rtm.25512.102
+ 10.0.0-rtm.25512.102
+ 10.0.0-rtm.25512.102
+ 10.0.0-rtm.25512.102
+ 10.0.0-rtm.25512.102
+ 10.0.0-rtm.25512.102
- 9.0.3
+ 10.0.0-rtm.25512.102
- 10.0.0-beta.25126.4
+ 10.0.0-beta.25512.102
diff --git a/eng/common/CIBuild.cmd b/eng/common/CIBuild.cmd
index 56c2f25ac22..ac1f72bf94e 100644
--- a/eng/common/CIBuild.cmd
+++ b/eng/common/CIBuild.cmd
@@ -1,2 +1,2 @@
@echo off
-powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0Build.ps1""" -restore -build -test -sign -pack -publish -ci %*"
\ No newline at end of file
+powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0Build.ps1""" -restore -build -test -sign -pack -publish -ci %*"
diff --git a/eng/common/SetupNugetSources.ps1 b/eng/common/SetupNugetSources.ps1
index 5db4ad71ee2..fc8d618014e 100644
--- a/eng/common/SetupNugetSources.ps1
+++ b/eng/common/SetupNugetSources.ps1
@@ -7,11 +7,11 @@
# See example call for this script below.
#
# - task: PowerShell@2
-# displayName: Setup Private Feeds Credentials
+# displayName: Setup internal Feeds Credentials
# condition: eq(variables['Agent.OS'], 'Windows_NT')
# inputs:
-# filePath: $(Build.SourcesDirectory)/eng/common/SetupNugetSources.ps1
-# arguments: -ConfigFile $(Build.SourcesDirectory)/NuGet.config -Password $Env:Token
+# filePath: $(System.DefaultWorkingDirectory)/eng/common/SetupNugetSources.ps1
+# arguments: -ConfigFile $(System.DefaultWorkingDirectory)/NuGet.config -Password $Env:Token
# env:
# Token: $(dn-bot-dnceng-artifact-feeds-rw)
#
@@ -34,19 +34,28 @@ Set-StrictMode -Version 2.0
. $PSScriptRoot\tools.ps1
+# Adds or enables the package source with the given name
+function AddOrEnablePackageSource($sources, $disabledPackageSources, $SourceName, $SourceEndPoint, $creds, $Username, $pwd) {
+ if ($disabledPackageSources -eq $null -or -not (EnableInternalPackageSource -DisabledPackageSources $disabledPackageSources -Creds $creds -PackageSourceName $SourceName)) {
+ AddPackageSource -Sources $sources -SourceName $SourceName -SourceEndPoint $SourceEndPoint -Creds $creds -Username $userName -pwd $Password
+ }
+}
+
# Add source entry to PackageSources
function AddPackageSource($sources, $SourceName, $SourceEndPoint, $creds, $Username, $pwd) {
$packageSource = $sources.SelectSingleNode("add[@key='$SourceName']")
if ($packageSource -eq $null)
{
+ Write-Host "Adding package source $SourceName"
+
$packageSource = $doc.CreateElement("add")
$packageSource.SetAttribute("key", $SourceName)
$packageSource.SetAttribute("value", $SourceEndPoint)
$sources.AppendChild($packageSource) | Out-Null
}
else {
- Write-Host "Package source $SourceName already present."
+ Write-Host "Package source $SourceName already present and enabled."
}
AddCredential -Creds $creds -Source $SourceName -Username $Username -pwd $pwd
@@ -59,6 +68,8 @@ function AddCredential($creds, $source, $username, $pwd) {
return;
}
+ Write-Host "Inserting credential for feed: " $source
+
# Looks for credential configuration for the given SourceName. Create it if none is found.
$sourceElement = $creds.SelectSingleNode($Source)
if ($sourceElement -eq $null)
@@ -91,24 +102,27 @@ function AddCredential($creds, $source, $username, $pwd) {
$passwordElement.SetAttribute("value", $pwd)
}
-function InsertMaestroPrivateFeedCredentials($Sources, $Creds, $Username, $pwd) {
- $maestroPrivateSources = $Sources.SelectNodes("add[contains(@key,'darc-int')]")
-
- Write-Host "Inserting credentials for $($maestroPrivateSources.Count) Maestro's private feeds."
-
- ForEach ($PackageSource in $maestroPrivateSources) {
- Write-Host "`tInserting credential for Maestro's feed:" $PackageSource.Key
- AddCredential -Creds $creds -Source $PackageSource.Key -Username $Username -pwd $pwd
+# Enable all darc-int package sources.
+function EnableMaestroInternalPackageSources($DisabledPackageSources, $Creds) {
+ $maestroInternalSources = $DisabledPackageSources.SelectNodes("add[contains(@key,'darc-int')]")
+ ForEach ($DisabledPackageSource in $maestroInternalSources) {
+ EnableInternalPackageSource -DisabledPackageSources $DisabledPackageSources -Creds $Creds -PackageSourceName $DisabledPackageSource.key
}
}
-function EnablePrivatePackageSources($DisabledPackageSources) {
- $maestroPrivateSources = $DisabledPackageSources.SelectNodes("add[contains(@key,'darc-int')]")
- ForEach ($DisabledPackageSource in $maestroPrivateSources) {
- Write-Host "`tEnsuring private source '$($DisabledPackageSource.key)' is enabled by deleting it from disabledPackageSource"
+# Enables an internal package source by name, if found. Returns true if the package source was found and enabled, false otherwise.
+function EnableInternalPackageSource($DisabledPackageSources, $Creds, $PackageSourceName) {
+ $DisabledPackageSource = $DisabledPackageSources.SelectSingleNode("add[@key='$PackageSourceName']")
+ if ($DisabledPackageSource) {
+ Write-Host "Enabling internal source '$($DisabledPackageSource.key)'."
+
# Due to https://github.com/NuGet/Home/issues/10291, we must actually remove the disabled entries
$DisabledPackageSources.RemoveChild($DisabledPackageSource)
+
+ AddCredential -Creds $creds -Source $DisabledPackageSource.Key -Username $userName -pwd $Password
+ return $true
}
+ return $false
}
if (!(Test-Path $ConfigFile -PathType Leaf)) {
@@ -121,15 +135,17 @@ $doc = New-Object System.Xml.XmlDocument
$filename = (Get-Item $ConfigFile).FullName
$doc.Load($filename)
-# Get reference to <packageSources> or create one if none exist already
+# Get reference to <packageSources> - fail if none exist
$sources = $doc.DocumentElement.SelectSingleNode("packageSources")
if ($sources -eq $null) {
- $sources = $doc.CreateElement("packageSources")
- $doc.DocumentElement.AppendChild($sources) | Out-Null
+ Write-PipelineTelemetryError -Category 'Build' -Message "Eng/common/SetupNugetSources.ps1 returned a non-zero exit code. NuGet config file must contain a packageSources section: $ConfigFile"
+ ExitWithExitCode 1
}
$creds = $null
+$feedSuffix = "v3/index.json"
if ($Password) {
+ $feedSuffix = "v2"
# Looks for a <packageSourceCredentials> node. Create it if none is found.
$creds = $doc.DocumentElement.SelectSingleNode("packageSourceCredentials")
if ($creds -eq $null) {
@@ -138,33 +154,22 @@ if ($Password) {
}
}
+$userName = "dn-bot"
+
# Check for disabledPackageSources; we'll enable any darc-int ones we find there
$disabledSources = $doc.DocumentElement.SelectSingleNode("disabledPackageSources")
if ($disabledSources -ne $null) {
Write-Host "Checking for any darc-int disabled package sources in the disabledPackageSources node"
- EnablePrivatePackageSources -DisabledPackageSources $disabledSources
-}
-
-$userName = "dn-bot"
-
-# Insert credential nodes for Maestro's private feeds
-InsertMaestroPrivateFeedCredentials -Sources $sources -Creds $creds -Username $userName -pwd $Password
-
-# 3.1 uses a different feed url format so it's handled differently here
-$dotnet31Source = $sources.SelectSingleNode("add[@key='dotnet3.1']")
-if ($dotnet31Source -ne $null) {
- AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal/nuget/v2" -Creds $creds -Username $userName -pwd $Password
- AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v2" -Creds $creds -Username $userName -pwd $Password
+ EnableMaestroInternalPackageSources -DisabledPackageSources $disabledSources -Creds $creds
}
-
-$dotnetVersions = @('5','6','7','8','9')
+$dotnetVersions = @('5','6','7','8','9','10')
foreach ($dotnetVersion in $dotnetVersions) {
$feedPrefix = "dotnet" + $dotnetVersion;
$dotnetSource = $sources.SelectSingleNode("add[@key='$feedPrefix']")
if ($dotnetSource -ne $null) {
- AddPackageSource -Sources $sources -SourceName "$feedPrefix-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$feedPrefix-internal/nuget/v2" -Creds $creds -Username $userName -pwd $Password
- AddPackageSource -Sources $sources -SourceName "$feedPrefix-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$feedPrefix-internal-transport/nuget/v2" -Creds $creds -Username $userName -pwd $Password
+ AddOrEnablePackageSource -Sources $sources -DisabledPackageSources $disabledSources -SourceName "$feedPrefix-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$feedPrefix-internal/nuget/$feedSuffix" -Creds $creds -Username $userName -pwd $Password
+ AddOrEnablePackageSource -Sources $sources -DisabledPackageSources $disabledSources -SourceName "$feedPrefix-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$feedPrefix-internal-transport/nuget/$feedSuffix" -Creds $creds -Username $userName -pwd $Password
}
}
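With this change the PowerShell setup script derives the feed URL suffix from the credential type ("v2" when a -Password value is supplied, "v3/index.json" otherwise) and routes every dotnet feed through AddOrEnablePackageSource, which first tries to re-enable a matching disabledPackageSources entry and only then adds a brand-new source. As a rough sketch (not part of the script), the endpoint for a hypothetical dotnet10 feed is composed like this:

    feedPrefix="dotnet10"
    feedSuffix="v3/index.json"   # the script switches this to "v2" when -Password is used
    echo "https://pkgs.dev.azure.com/dnceng/internal/_packaging/${feedPrefix}-internal/nuget/${feedSuffix}"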
diff --git a/eng/common/SetupNugetSources.sh b/eng/common/SetupNugetSources.sh
index 4604b61b032..dd2564aef01 100755
--- a/eng/common/SetupNugetSources.sh
+++ b/eng/common/SetupNugetSources.sh
@@ -11,8 +11,8 @@
# - task: Bash@3
# displayName: Setup Internal Feeds
# inputs:
-# filePath: $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh
-# arguments: $(Build.SourcesDirectory)/NuGet.config
+# filePath: $(System.DefaultWorkingDirectory)/eng/common/SetupNugetSources.sh
+# arguments: $(System.DefaultWorkingDirectory)/NuGet.config
# condition: ne(variables['Agent.OS'], 'Windows_NT')
# - task: NuGetAuthenticate@1
#
@@ -52,78 +52,126 @@ if [[ `uname -s` == "Darwin" ]]; then
TB=''
fi
-# Ensure there is a ... section.
-grep -i "" $ConfigFile
-if [ "$?" != "0" ]; then
- echo "Adding ... section."
- ConfigNodeHeader=""
- PackageSourcesTemplate="${TB}${NL}${TB}"
+# Enables an internal package source by name, if found. Returns 0 if found and enabled, 1 if not found.
+EnableInternalPackageSource() {
+ local PackageSourceName="$1"
+
+ # Check if disabledPackageSources section exists
+ grep -i "" "$ConfigFile" > /dev/null
+ if [ "$?" != "0" ]; then
+ return 1 # No disabled sources section
+ fi
+
+ # Check if this source name is disabled
+ grep -i " /dev/null
+ if [ "$?" == "0" ]; then
+ echo "Enabling internal source '$PackageSourceName'."
+ # Remove the disabled entry
+ local OldDisableValue=""
+ local NewDisableValue=""
+ sed -i.bak "s|$OldDisableValue|$NewDisableValue|" "$ConfigFile"
+
+ # Add the source name to PackageSources for credential handling
+ PackageSources+=("$PackageSourceName")
+ return 0 # Found and enabled
+ fi
+
+ return 1 # Not found in disabled sources
+}
+
+# Add source entry to PackageSources
+AddPackageSource() {
+ local SourceName="$1"
+ local SourceEndPoint="$2"
+
+ # Check if source already exists
+ grep -i " /dev/null
+ if [ "$?" == "0" ]; then
+ echo "Package source $SourceName already present and enabled."
+ PackageSources+=("$SourceName")
+ return
+ fi
+
+ echo "Adding package source $SourceName"
+ PackageSourcesNodeFooter="</packageSources>"
+ PackageSourceTemplate="${TB}<add key=\"$SourceName\" value=\"$SourceEndPoint\" />"
+
+ sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" "$ConfigFile"
+ PackageSources+=("$SourceName")
+}
+
+# Adds or enables the package source with the given name
+AddOrEnablePackageSource() {
+ local SourceName="$1"
+ local SourceEndPoint="$2"
+
+ # Try to enable if disabled, if not found then add new source
+ EnableInternalPackageSource "$SourceName"
+ if [ "$?" != "0" ]; then
+ AddPackageSource "$SourceName" "$SourceEndPoint"
+ fi
+}
- sed -i.bak "s|$ConfigNodeHeader|$ConfigNodeHeader${NL}$PackageSourcesTemplate|" $ConfigFile
-fi
+# Enable all darc-int package sources
+EnableMaestroInternalPackageSources() {
+ # Check if disabledPackageSources section exists
+ grep -i "" "$ConfigFile" > /dev/null
+ if [ "$?" != "0" ]; then
+ return # No disabled sources section
+ fi
+
+ # Find all darc-int disabled sources
+ local DisabledDarcIntSources=()
+ DisabledDarcIntSources+=$(grep -oh '"darc-int-[^"]*" value="true"' "$ConfigFile" | tr -d '"')
+
+ for DisabledSourceName in ${DisabledDarcIntSources[@]} ; do
+ if [[ $DisabledSourceName == darc-int* ]]; then
+ EnableInternalPackageSource "$DisabledSourceName"
+ fi
+ done
+}
-# Ensure there is a <packageSourceCredentials>...</packageSourceCredentials> section.
-grep -i "<packageSourceCredentials>" $ConfigFile
+# Ensure there is a <packageSources>...</packageSources> section.
+grep -i "<packageSources>" $ConfigFile
if [ "$?" != "0" ]; then
- echo "Adding ... section."
-
- PackageSourcesNodeFooter=""
- PackageSourceCredentialsTemplate="${TB}${NL}${TB}"
-
- sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourcesNodeFooter${NL}$PackageSourceCredentialsTemplate|" $ConfigFile
+ Write-PipelineTelemetryError -Category 'Build' "Error: Eng/common/SetupNugetSources.sh returned a non-zero exit code. NuGet config file must contain a packageSources section: $ConfigFile"
+ ExitWithExitCode 1
fi
PackageSources=()
-# Ensure dotnet3.1-internal and dotnet3.1-internal-transport are in the packageSources if the public dotnet3.1 feeds are present
-grep -i "... section.
+ grep -i "" $ConfigFile
if [ "$?" != "0" ]; then
- echo "Adding dotnet3.1-internal to the packageSources."
- PackageSourcesNodeFooter=""
- PackageSourceTemplate="${TB}"
+ echo "Adding ... section."
- sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile
- fi
- PackageSources+=('dotnet3.1-internal')
-
- grep -i "" $ConfigFile
- if [ "$?" != "0" ]; then
- echo "Adding dotnet3.1-internal-transport to the packageSources."
PackageSourcesNodeFooter="</packageSources>"
- PackageSourceTemplate="${TB}<add key=\"dotnet3.1-internal-transport\" value=\"https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v2\" />"
+ PackageSourceCredentialsTemplate="${TB}<packageSourceCredentials>${NL}${TB}</packageSourceCredentials>"
- sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile
+ sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourcesNodeFooter${NL}$PackageSourceCredentialsTemplate|" $ConfigFile
fi
- PackageSources+=('dotnet3.1-internal-transport')
fi
-DotNetVersions=('5' '6' '7' '8' '9')
+# Check for disabledPackageSources; we'll enable any darc-int ones we find there
+grep -i "" $ConfigFile > /dev/null
+if [ "$?" == "0" ]; then
+ echo "Checking for any darc-int disabled package sources in the disabledPackageSources node"
+ EnableMaestroInternalPackageSources
+fi
+
+DotNetVersions=('5' '6' '7' '8' '9' '10')
for DotNetVersion in ${DotNetVersions[@]} ; do
FeedPrefix="dotnet${DotNetVersion}";
- grep -i " /dev/null
if [ "$?" == "0" ]; then
- grep -i ""
-
- sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile
- fi
- PackageSources+=("$FeedPrefix-internal")
-
- grep -i "" $ConfigFile
- if [ "$?" != "0" ]; then
- echo "Adding $FeedPrefix-internal-transport to the packageSources."
- PackageSourcesNodeFooter=""
- PackageSourceTemplate="${TB}"
-
- sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile
- fi
- PackageSources+=("$FeedPrefix-internal-transport")
+ AddOrEnablePackageSource "$FeedPrefix-internal" "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$FeedPrefix-internal/nuget/$FeedSuffix"
+ AddOrEnablePackageSource "$FeedPrefix-internal-transport" "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$FeedPrefix-internal-transport/nuget/$FeedSuffix"
fi
done
@@ -139,29 +187,12 @@ if [ "$CredToken" ]; then
# Check if there is no existing credential for this FeedName
grep -i "<$FeedName>" $ConfigFile
if [ "$?" != "0" ]; then
- echo "Adding credentials for $FeedName."
+ echo " Inserting credential for feed: $FeedName"
PackageSourceCredentialsNodeFooter="</packageSourceCredentials>"
- NewCredential="${TB}${TB}<$FeedName>${NL}${NL}${NL}$FeedName>"
+ NewCredential="${TB}${TB}<$FeedName>${NL}${TB}${NL}${TB}${TB}${NL}${TB}${TB}$FeedName>"
sed -i.bak "s|$PackageSourceCredentialsNodeFooter|$NewCredential${NL}$PackageSourceCredentialsNodeFooter|" $ConfigFile
fi
done
fi
-
-# Re-enable any entries in disabledPackageSources where the feed name contains darc-int
-grep -i "" $ConfigFile
-if [ "$?" == "0" ]; then
- DisabledDarcIntSources=()
- echo "Re-enabling any disabled \"darc-int\" package sources in $ConfigFile"
- DisabledDarcIntSources+=$(grep -oh '"darc-int-[^"]*" value="true"' $ConfigFile | tr -d '"')
- for DisabledSourceName in ${DisabledDarcIntSources[@]} ; do
- if [[ $DisabledSourceName == darc-int* ]]
- then
- OldDisableValue=""
- NewDisableValue=""
- sed -i.bak "s|$OldDisableValue|$NewDisableValue|" $ConfigFile
- echo "Neutralized disablePackageSources entry for '$DisabledSourceName'"
- fi
- done
-fi
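The shell rewrite mirrors the PowerShell flow: EnableInternalPackageSource strips the disabling entry for a named source out of disabledPackageSources with sed, AddPackageSource appends a new add element before the closing packageSources tag, and AddOrEnablePackageSource tries the former before falling back to the latter. The literal XML patterns inside the script were lost in this extract, so the sketch below uses plausible stand-ins for them; it is a self-contained illustration of the re-enable step, not the script itself:

    #!/usr/bin/env bash
    # Illustrative only: re-enable a disabled source by deleting its
    # <add key="NAME" value="true" /> entry, as EnableInternalPackageSource does.
    set -euo pipefail
    ConfigFile="$(mktemp)"
    printf '%s\n' \
      '<configuration>' \
      '  <packageSources>' \
      '    <add key="darc-int-example" value="https://example.invalid/nuget/v3/index.json" />' \
      '  </packageSources>' \
      '  <disabledPackageSources>' \
      '    <add key="darc-int-example" value="true" />' \
      '  </disabledPackageSources>' \
      '</configuration>' > "$ConfigFile"

    SourceName="darc-int-example"
    OldDisableValue="<add key=\"$SourceName\" value=\"true\" />"
    sed -i.bak "s|$OldDisableValue||" "$ConfigFile"      # drop the disabling entry
    grep -q "value=\"true\"" "$ConfigFile" || echo "source '$SourceName' re-enabled"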
diff --git a/eng/common/build.ps1 b/eng/common/build.ps1
index 438f9920c43..8cfee107e7a 100644
--- a/eng/common/build.ps1
+++ b/eng/common/build.ps1
@@ -7,6 +7,7 @@ Param(
[string] $msbuildEngine = $null,
[bool] $warnAsError = $true,
[bool] $nodeReuse = $true,
+ [switch] $buildCheck = $false,
[switch][Alias('r')]$restore,
[switch] $deployDeps,
[switch][Alias('b')]$build,
@@ -20,6 +21,7 @@ Param(
[switch] $publish,
[switch] $clean,
[switch][Alias('pb')]$productBuild,
+ [switch]$fromVMR,
[switch][Alias('bl')]$binaryLog,
[switch][Alias('nobl')]$excludeCIBinarylog,
[switch] $ci,
@@ -71,6 +73,9 @@ function Print-Usage() {
Write-Host " -msbuildEngine Msbuild engine to use to run build ('dotnet', 'vs', or unspecified)."
Write-Host " -excludePrereleaseVS Set to exclude build engines in prerelease versions of Visual Studio"
Write-Host " -nativeToolsOnMachine Sets the native tools on machine environment variable (indicating that the script should use native tools on machine)"
+ Write-Host " -nodeReuse Sets nodereuse msbuild parameter ('true' or 'false')"
+ Write-Host " -buildCheck Sets /check msbuild parameter"
+ Write-Host " -fromVMR Set when building from within the VMR"
Write-Host ""
Write-Host "Command line arguments not listed above are passed thru to msbuild."
@@ -97,6 +102,7 @@ function Build {
$bl = if ($binaryLog) { '/bl:' + (Join-Path $LogDir 'Build.binlog') } else { '' }
$platformArg = if ($platform) { "/p:Platform=$platform" } else { '' }
+ $check = if ($buildCheck) { '/check' } else { '' }
if ($projects) {
# Re-assign properties to a new variable because PowerShell doesn't let us append properties directly for unclear reasons.
@@ -113,6 +119,7 @@ function Build {
MSBuild $toolsetBuildProj `
$bl `
$platformArg `
+ $check `
/p:Configuration=$configuration `
/p:RepoRoot=$RepoRoot `
/p:Restore=$restore `
@@ -122,11 +129,13 @@ function Build {
/p:Deploy=$deploy `
/p:Test=$test `
/p:Pack=$pack `
- /p:DotNetBuildRepo=$productBuild `
+ /p:DotNetBuild=$productBuild `
+ /p:DotNetBuildFromVMR=$fromVMR `
/p:IntegrationTest=$integrationTest `
/p:PerformanceTest=$performanceTest `
/p:Sign=$sign `
/p:Publish=$publish `
+ /p:RestoreStaticGraphEnableBinaryLogger=$binaryLog `
@properties
}
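build.ps1 picks up two new switches here: -buildCheck, which appends MSBuild's /check argument, and -fromVMR, which flows into /p:DotNetBuildFromVMR; the old /p:DotNetBuildRepo property is renamed to /p:DotNetBuild. A sketch of an invocation exercising both, assuming PowerShell 7 (pwsh) is on the PATH:

    pwsh -NoProfile -File eng/common/build.ps1 -restore -build -buildCheck -fromVMR -configuration Release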
diff --git a/eng/common/build.sh b/eng/common/build.sh
index 483647daf18..9767bb411a4 100755
--- a/eng/common/build.sh
+++ b/eng/common/build.sh
@@ -42,6 +42,8 @@ usage()
echo " --prepareMachine Prepare machine for CI run, clean up processes after build"
echo " --nodeReuse Sets nodereuse msbuild parameter ('true' or 'false')"
echo " --warnAsError Sets warnaserror msbuild parameter ('true' or 'false')"
+ echo " --buildCheck Sets /check msbuild parameter"
+ echo " --fromVMR Set when building from within the VMR"
echo ""
echo "Command line arguments not listed above are passed thru to msbuild."
echo "Arguments can also be passed in with a single hyphen."
@@ -63,6 +65,7 @@ restore=false
build=false
source_build=false
product_build=false
+from_vmr=false
rebuild=false
test=false
integration_test=false
@@ -76,6 +79,7 @@ clean=false
warn_as_error=true
node_reuse=true
+build_check=false
binary_log=false
exclude_ci_binary_log=false
pipelines_log=false
@@ -87,7 +91,7 @@ verbosity='minimal'
runtime_source_feed=''
runtime_source_feed_key=''
-properties=''
+properties=()
while [[ $# > 0 ]]; do
opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")"
case "$opt" in
@@ -127,19 +131,22 @@ while [[ $# > 0 ]]; do
-pack)
pack=true
;;
- -sourcebuild|-sb)
+ -sourcebuild|-source-build|-sb)
build=true
source_build=true
product_build=true
restore=true
pack=true
;;
- -productBuild|-pb)
+ -productbuild|-product-build|-pb)
build=true
product_build=true
restore=true
pack=true
;;
+ -fromvmr|-from-vmr)
+ from_vmr=true
+ ;;
-test|-t)
test=true
;;
@@ -173,6 +180,9 @@ while [[ $# > 0 ]]; do
node_reuse=$2
shift
;;
+ -buildcheck)
+ build_check=true
+ ;;
-runtimesourcefeed)
runtime_source_feed=$2
shift
@@ -182,7 +192,7 @@ while [[ $# > 0 ]]; do
shift
;;
*)
- properties="$properties $1"
+ properties+=("$1")
;;
esac
@@ -216,7 +226,7 @@ function Build {
InitializeCustomToolset
if [[ ! -z "$projects" ]]; then
- properties="$properties /p:Projects=$projects"
+ properties+=("/p:Projects=$projects")
fi
local bl=""
@@ -224,14 +234,21 @@ function Build {
bl="/bl:\"$log_dir/Build.binlog\""
fi
+ local check=""
+ if [[ "$build_check" == true ]]; then
+ check="/check"
+ fi
+
MSBuild $_InitializeToolset \
$bl \
+ $check \
/p:Configuration=$configuration \
/p:RepoRoot="$repo_root" \
/p:Restore=$restore \
/p:Build=$build \
- /p:DotNetBuildRepo=$product_build \
+ /p:DotNetBuild=$product_build \
/p:DotNetBuildSourceOnly=$source_build \
+ /p:DotNetBuildFromVMR=$from_vmr \
/p:Rebuild=$rebuild \
/p:Test=$test \
/p:Pack=$pack \
@@ -239,7 +256,8 @@ function Build {
/p:PerformanceTest=$performance_test \
/p:Sign=$sign \
/p:Publish=$publish \
- $properties
+ /p:RestoreStaticGraphEnableBinaryLogger=$binary_log \
+ ${properties[@]+"${properties[@]}"}
ExitWithExitCode 0
}
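Besides the same -buildCheck/-fromVMR additions, build.sh stops accumulating pass-through MSBuild arguments in a flat string and keeps them in a bash array, expanded at the call site with the ${properties[@]+"${properties[@]}"} guard so an empty array expands safely even under set -u or older bash. The array form is what preserves argument boundaries; a small standalone demonstration (not taken from the script):

    #!/usr/bin/env bash
    # String accumulation re-splits on whitespace; an array keeps each argument intact.
    set -euo pipefail
    count_args() { printf '%d args\n' "$#"; }

    props_string=""
    props_array=()
    for p in '/p:Projects=My Project.sln' '/p:Foo=bar'; do
      props_string="$props_string $p"
      props_array+=("$p")
    done

    count_args $props_string            # 3 args: the path containing a space gets split
    count_args "${props_array[@]}"      # 2 args: boundaries preserved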
diff --git a/eng/common/cibuild.sh b/eng/common/cibuild.sh
index 1a02c0dec8f..66e3b0ac61c 100755
--- a/eng/common/cibuild.sh
+++ b/eng/common/cibuild.sh
@@ -13,4 +13,4 @@ while [[ -h $source ]]; do
done
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
-. "$scriptroot/build.sh" --restore --build --test --pack --publish --ci $@
\ No newline at end of file
+. "$scriptroot/build.sh" --restore --build --test --pack --publish --ci $@
diff --git a/eng/common/core-templates/job/job.yml b/eng/common/core-templates/job/job.yml
index 295c9a2317c..5ce51840619 100644
--- a/eng/common/core-templates/job/job.yml
+++ b/eng/common/core-templates/job/job.yml
@@ -20,10 +20,10 @@ parameters:
artifacts: ''
enableMicrobuild: false
enableMicrobuildForMacAndLinux: false
+ microbuildUseESRP: true
enablePublishBuildArtifacts: false
enablePublishBuildAssets: false
enablePublishTestResults: false
- enablePublishUsingPipelines: false
enableBuildRetry: false
mergeTestResults: false
testRunTitle: ''
@@ -74,9 +74,6 @@ jobs:
- ${{ if ne(parameters.enableTelemetry, 'false') }}:
- name: DOTNET_CLI_TELEMETRY_PROFILE
value: '$(Build.Repository.Uri)'
- - ${{ if eq(parameters.enableRichCodeNavigation, 'true') }}:
- - name: EnableRichCodeNavigation
- value: 'true'
# Retry signature validation up to three times, waiting 2 seconds between attempts.
# See https://learn.microsoft.com/en-us/nuget/reference/errors-and-warnings/nu3028#retry-untrusted-root-failures
- name: NUGET_EXPERIMENTAL_CHAIN_BUILD_RETRY_POLICY
@@ -132,6 +129,7 @@ jobs:
parameters:
enableMicrobuild: ${{ parameters.enableMicrobuild }}
enableMicrobuildForMacAndLinux: ${{ parameters.enableMicrobuildForMacAndLinux }}
+ microbuildUseESRP: ${{ parameters.microbuildUseESRP }}
continueOnError: ${{ parameters.continueOnError }}
- ${{ if and(eq(parameters.runAsPublic, 'false'), eq(variables['System.TeamProject'], 'internal')) }}:
@@ -148,16 +146,6 @@ jobs:
- ${{ each step in parameters.steps }}:
- ${{ step }}
- - ${{ if eq(parameters.enableRichCodeNavigation, true) }}:
- - task: RichCodeNavIndexer@0
- displayName: RichCodeNav Upload
- inputs:
- languages: ${{ coalesce(parameters.richCodeNavigationLanguage, 'csharp') }}
- environment: ${{ coalesce(parameters.richCodeNavigationEnvironment, 'internal') }}
- richNavLogOutputDirectory: $(Build.SourcesDirectory)/artifacts/bin
- uploadRichNavArtifacts: ${{ coalesce(parameters.richCodeNavigationUploadArtifacts, false) }}
- continueOnError: true
-
- ${{ each step in parameters.componentGovernanceSteps }}:
- ${{ step }}
@@ -175,7 +163,7 @@ jobs:
inputs:
testResultsFormat: 'xUnit'
testResultsFiles: '*.xml'
- searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)'
+ searchFolder: '$(System.DefaultWorkingDirectory)/artifacts/TestResults/$(_BuildConfig)'
testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-xunit
mergeTestResults: ${{ parameters.mergeTestResults }}
continueOnError: true
@@ -186,7 +174,7 @@ jobs:
inputs:
testResultsFormat: 'VSTest'
testResultsFiles: '*.trx'
- searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)'
+ searchFolder: '$(System.DefaultWorkingDirectory)/artifacts/TestResults/$(_BuildConfig)'
testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-trx
mergeTestResults: ${{ parameters.mergeTestResults }}
continueOnError: true
@@ -230,7 +218,7 @@ jobs:
- task: CopyFiles@2
displayName: Gather buildconfiguration for build retry
inputs:
- SourceFolder: '$(Build.SourcesDirectory)/eng/common/BuildConfiguration'
+ SourceFolder: '$(System.DefaultWorkingDirectory)/eng/common/BuildConfiguration'
Contents: '**'
TargetFolder: '$(Build.ArtifactStagingDirectory)/eng/common/BuildConfiguration'
continueOnError: true
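A recurring theme in these template changes is swapping $(Build.SourcesDirectory) for $(System.DefaultWorkingDirectory) in search folders and script paths; on a standard single-checkout agent both usually resolve to the same folder, but they can diverge with custom or multi-repo checkouts. When in doubt, a script step can print both (Azure Pipelines surfaces variables as environment variables by upper-casing them and replacing dots with underscores):

    echo "Build.SourcesDirectory:         ${BUILD_SOURCESDIRECTORY:-<unset>}"
    echo "System.DefaultWorkingDirectory: ${SYSTEM_DEFAULTWORKINGDIRECTORY:-<unset>}"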
diff --git a/eng/common/core-templates/job/onelocbuild.yml b/eng/common/core-templates/job/onelocbuild.yml
index 00feec8ebbc..c5788829a87 100644
--- a/eng/common/core-templates/job/onelocbuild.yml
+++ b/eng/common/core-templates/job/onelocbuild.yml
@@ -4,11 +4,11 @@ parameters:
# Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
pool: ''
-
+
CeapexPat: $(dn-bot-ceapex-package-r) # PAT for the loc AzDO instance https://dev.azure.com/ceapex
GithubPat: $(BotAccount-dotnet-bot-repo-PAT)
- SourcesDirectory: $(Build.SourcesDirectory)
+ SourcesDirectory: $(System.DefaultWorkingDirectory)
CreatePr: true
AutoCompletePr: false
ReusePr: true
@@ -27,7 +27,7 @@ parameters:
is1ESPipeline: ''
jobs:
- job: OneLocBuild${{ parameters.JobNameSuffix }}
-
+
dependsOn: ${{ parameters.dependsOn }}
displayName: OneLocBuild${{ parameters.JobNameSuffix }}
@@ -68,7 +68,7 @@ jobs:
- ${{ if ne(parameters.SkipLocProjectJsonGeneration, 'true') }}:
- task: Powershell@2
inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/generate-locproject.ps1
+ filePath: $(System.DefaultWorkingDirectory)/eng/common/generate-locproject.ps1
arguments: $(_GenerateLocProjectArguments)
displayName: Generate LocProject.json
condition: ${{ parameters.condition }}
@@ -86,8 +86,7 @@ jobs:
isAutoCompletePrSelected: ${{ parameters.AutoCompletePr }}
${{ if eq(parameters.CreatePr, true) }}:
isUseLfLineEndingsSelected: ${{ parameters.UseLfLineEndings }}
- ${{ if eq(parameters.RepoType, 'gitHub') }}:
- isShouldReusePrSelected: ${{ parameters.ReusePr }}
+ isShouldReusePrSelected: ${{ parameters.ReusePr }}
packageSourceAuth: patAuth
patVariable: ${{ parameters.CeapexPat }}
${{ if eq(parameters.RepoType, 'gitHub') }}:
@@ -100,22 +99,20 @@ jobs:
mirrorBranch: ${{ parameters.MirrorBranch }}
condition: ${{ parameters.condition }}
- - template: /eng/common/core-templates/steps/publish-build-artifacts.yml
- parameters:
- is1ESPipeline: ${{ parameters.is1ESPipeline }}
- args:
- displayName: Publish Localization Files
- pathToPublish: '$(Build.ArtifactStagingDirectory)/loc'
- publishLocation: Container
- artifactName: Loc
- condition: ${{ parameters.condition }}
+ # Copy the locProject.json to the root of the Loc directory, then publish a pipeline artifact
+ - task: CopyFiles@2
+ displayName: Copy LocProject.json
+ inputs:
+ SourceFolder: '$(System.DefaultWorkingDirectory)/eng/Localize/'
+ Contents: 'LocProject.json'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/loc'
+ condition: ${{ parameters.condition }}
- - template: /eng/common/core-templates/steps/publish-build-artifacts.yml
+ - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
args:
- displayName: Publish LocProject.json
- pathToPublish: '$(Build.SourcesDirectory)/eng/Localize/'
- publishLocation: Container
- artifactName: Loc
- condition: ${{ parameters.condition }}
\ No newline at end of file
+ targetPath: '$(Build.ArtifactStagingDirectory)/loc'
+ artifactName: 'Loc'
+ displayName: 'Publish Localization Files'
+ condition: ${{ parameters.condition }}
diff --git a/eng/common/core-templates/job/publish-build-assets.yml b/eng/common/core-templates/job/publish-build-assets.yml
index c7d59dcbfe1..37dff559fc1 100644
--- a/eng/common/core-templates/job/publish-build-assets.yml
+++ b/eng/common/core-templates/job/publish-build-assets.yml
@@ -20,9 +20,6 @@ parameters:
# if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
runAsPublic: false
- # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing
- publishUsingPipelines: false
-
# Optional: whether the build's assets should be published immediately after the publish-to-BAR stage completes, rather than in a separate stage
publishAssetsImmediately: false
@@ -32,6 +29,19 @@ parameters:
is1ESPipeline: ''
+  # Optional: whether or not the build has assets it wants to publish to BAR
+ isAssetlessBuild: false
+
+  # Optional: publishing version
+ publishingVersion: 3
+
+ # Optional: A minimatch pattern for the asset manifests to publish to BAR
+ assetManifestsPattern: '*/manifests/**/*.xml'
+
+ repositoryAlias: self
+
+ officialBuildId: ''
+
jobs:
- job: Asset_Registry_Publish
@@ -54,6 +64,11 @@ jobs:
value: false
# unconditional - needed for logs publishing (redactor tool version)
- template: /eng/common/core-templates/post-build/common-variables.yml
+ - name: OfficialBuildId
+ ${{ if ne(parameters.officialBuildId, '') }}:
+ value: ${{ parameters.officialBuildId }}
+ ${{ else }}:
+ value: $(Build.BuildNumber)
pool:
# We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
@@ -72,17 +87,36 @@ jobs:
- 'Illegal entry point, is1ESPipeline is not defined. Repository yaml should not directly reference templates in core-templates folder.': error
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - checkout: self
+ - checkout: ${{ parameters.repositoryAlias }}
fetchDepth: 3
clean: true
-
- - task: DownloadPipelineArtifact@2
- displayName: Download Asset Manifests
- inputs:
- artifactName: AssetManifests
- targetPath: '$(Build.StagingDirectory)/AssetManifests'
- condition: ${{ parameters.condition }}
- continueOnError: ${{ parameters.continueOnError }}
+
+ - ${{ if eq(parameters.isAssetlessBuild, 'false') }}:
+ - ${{ if eq(parameters.publishingVersion, 3) }}:
+ - task: DownloadPipelineArtifact@2
+ displayName: Download Asset Manifests
+ inputs:
+ artifactName: AssetManifests
+ targetPath: '$(Build.StagingDirectory)/AssetManifests'
+ condition: ${{ parameters.condition }}
+ continueOnError: ${{ parameters.continueOnError }}
+ - ${{ if eq(parameters.publishingVersion, 4) }}:
+ - task: DownloadPipelineArtifact@2
+ displayName: Download V4 asset manifests
+ inputs:
+ itemPattern: '*/manifests/**/*.xml'
+ targetPath: '$(Build.StagingDirectory)/AllAssetManifests'
+ condition: ${{ parameters.condition }}
+ continueOnError: ${{ parameters.continueOnError }}
+ - task: CopyFiles@2
+ displayName: Copy V4 asset manifests to AssetManifests
+ inputs:
+ SourceFolder: '$(Build.StagingDirectory)/AllAssetManifests'
+ Contents: ${{ parameters.assetManifestsPattern }}
+ TargetFolder: '$(Build.StagingDirectory)/AssetManifests'
+ flattenFolders: true
+ condition: ${{ parameters.condition }}
+ continueOnError: ${{ parameters.continueOnError }}
- task: NuGetAuthenticate@1
@@ -92,12 +126,12 @@ jobs:
azureSubscription: "Darc: Maestro Production"
scriptType: ps
scriptLocation: scriptPath
- scriptPath: $(Build.SourcesDirectory)/eng/common/sdk-task.ps1
+ scriptPath: $(System.DefaultWorkingDirectory)/eng/common/sdk-task.ps1
arguments: -task PublishBuildAssets -restore -msbuildEngine dotnet
/p:ManifestsPath='$(Build.StagingDirectory)/AssetManifests'
+ /p:IsAssetlessBuild=${{ parameters.isAssetlessBuild }}
/p:MaestroApiEndpoint=https://maestro.dot.net
- /p:PublishUsingPipelines=${{ parameters.publishUsingPipelines }}
- /p:OfficialBuildId=$(Build.BuildNumber)
+ /p:OfficialBuildId=$(OfficialBuildId)
condition: ${{ parameters.condition }}
continueOnError: ${{ parameters.continueOnError }}
@@ -112,13 +146,24 @@ jobs:
Add-Content -Path $filePath -Value "$(DefaultChannels)"
Add-Content -Path $filePath -Value $(IsStableBuild)
- $symbolExclusionfile = "$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt"
+ $symbolExclusionfile = "$(System.DefaultWorkingDirectory)/eng/SymbolPublishingExclusionsFile.txt"
if (Test-Path -Path $symbolExclusionfile)
{
Write-Host "SymbolExclusionFile exists"
Copy-Item -Path $symbolExclusionfile -Destination "$(Build.StagingDirectory)/ReleaseConfigs"
}
+ - ${{ if eq(parameters.publishingVersion, 4) }}:
+ - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ args:
+ targetPath: '$(Build.ArtifactStagingDirectory)/MergedManifest.xml'
+ artifactName: AssetManifests
+ displayName: 'Publish Merged Manifest'
+ retryCountOnTaskFailure: 10 # for any logs being locked
+ sbomEnabled: false # we don't need SBOM for logs
+
- template: /eng/common/core-templates/steps/publish-build-artifacts.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
@@ -128,7 +173,7 @@ jobs:
publishLocation: Container
artifactName: ReleaseConfigs
- - ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
+ - ${{ if or(eq(parameters.publishAssetsImmediately, 'true'), eq(parameters.isAssetlessBuild, 'true')) }}:
- template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
parameters:
BARBuildId: ${{ parameters.BARBuildId }}
@@ -141,7 +186,7 @@ jobs:
azureSubscription: "Darc: Maestro Production"
scriptType: ps
scriptLocation: scriptPath
- scriptPath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1
+ scriptPath: $(System.DefaultWorkingDirectory)/eng/common/post-build/publish-using-darc.ps1
arguments: >
-BuildId $(BARBuildId)
-PublishingInfraVersion 3
@@ -149,6 +194,7 @@ jobs:
-WaitPublishingFinish true
-ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}'
-SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}'
+ -SkipAssetsPublishing '${{ parameters.isAssetlessBuild }}'
- ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}:
- template: /eng/common/core-templates/steps/publish-logs.yml
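For publishing version 4 the job now downloads every artifact matching the */manifests/**/*.xml minimatch pattern and flattens the results into a single AssetManifests folder before handing them to the PublishBuildAssets task. A rough shell equivalent of that flattening copy, assuming the same staging-directory layout (illustrative, not part of the template):

    src="${BUILD_STAGINGDIRECTORY}/AllAssetManifests"
    dst="${BUILD_STAGINGDIRECTORY}/AssetManifests"
    mkdir -p "$dst"
    find "$src" -path '*/manifests/*' -name '*.xml' -exec cp {} "$dst"/ \;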
diff --git a/eng/common/core-templates/job/source-build.yml b/eng/common/core-templates/job/source-build.yml
index 05f7ad6ef0d..d805d5faeb9 100644
--- a/eng/common/core-templates/job/source-build.yml
+++ b/eng/common/core-templates/job/source-build.yml
@@ -27,6 +27,8 @@ parameters:
# Specifies the build script to invoke to perform the build in the repo. The default
# './build.sh' should work for typical Arcade repositories, but this is customizable for
# difficult situations.
+ # buildArguments: ''
+ # Specifies additional build arguments to pass to the build script.
# jobProperties: {}
# A list of job properties to inject at the top level, for potential extensibility beyond
# container and pool.
diff --git a/eng/common/core-templates/jobs/codeql-build.yml b/eng/common/core-templates/jobs/codeql-build.yml
index f2144252cc6..dbc14ac580a 100644
--- a/eng/common/core-templates/jobs/codeql-build.yml
+++ b/eng/common/core-templates/jobs/codeql-build.yml
@@ -15,7 +15,6 @@ jobs:
enablePublishBuildArtifacts: false
enablePublishTestResults: false
enablePublishBuildAssets: false
- enablePublishUsingPipelines: false
enableTelemetry: true
variables:
@@ -25,7 +24,7 @@ jobs:
- name: DefaultGuardianVersion
value: 0.109.0
- name: GuardianPackagesConfigFile
- value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config
+ value: $(System.DefaultWorkingDirectory)\eng\common\sdl\packages.config
- name: GuardianVersion
value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }}
diff --git a/eng/common/core-templates/jobs/jobs.yml b/eng/common/core-templates/jobs/jobs.yml
index ea69be4341c..01ada747665 100644
--- a/eng/common/core-templates/jobs/jobs.yml
+++ b/eng/common/core-templates/jobs/jobs.yml
@@ -5,9 +5,6 @@ parameters:
# Optional: Include PublishBuildArtifacts task
enablePublishBuildArtifacts: false
- # Optional: Enable publishing using release pipelines
- enablePublishUsingPipelines: false
-
# Optional: Enable running the source-build jobs to build repo from source
enableSourceBuild: false
@@ -30,6 +27,9 @@ parameters:
# Optional: Publish the assets as soon as the publish to BAR stage is complete, rather than doing so in a separate stage.
publishAssetsImmediately: false
+  # Optional: whether or not the build has assets it wants to publish to BAR
+ isAssetlessBuild: false
+
# Optional: If using publishAssetsImmediately and additional parameters are needed, can be used to send along additional parameters (normally sent to post-build.yml)
artifactsPublishingAdditionalParameters: ''
signingValidationAdditionalParameters: ''
@@ -43,6 +43,8 @@ parameters:
artifacts: {}
is1ESPipeline: ''
+ repositoryAlias: self
+ officialBuildId: ''
# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
# and some (Microbuild) should only be applied to non-PR cases for internal builds.
@@ -83,7 +85,6 @@ jobs:
- template: /eng/common/core-templates/jobs/source-build.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
- allCompletedJobId: Source_Build_Complete
${{ each parameter in parameters.sourceBuildParameters }}:
${{ parameter.key }}: ${{ parameter.value }}
@@ -96,7 +97,7 @@ jobs:
${{ parameter.key }}: ${{ parameter.value }}
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - ${{ if or(eq(parameters.enablePublishBuildAssets, true), eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, '')) }}:
+ - ${{ if or(eq(parameters.enablePublishBuildAssets, true), eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, ''), eq(parameters.isAssetlessBuild, true)) }}:
- template: ../job/publish-build-assets.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
@@ -108,12 +109,12 @@ jobs:
- ${{ if eq(parameters.publishBuildAssetsDependsOn, '') }}:
- ${{ each job in parameters.jobs }}:
- ${{ job.job }}
- - ${{ if eq(parameters.enableSourceBuild, true) }}:
- - Source_Build_Complete
runAsPublic: ${{ parameters.runAsPublic }}
- publishUsingPipelines: ${{ parameters.enablePublishUsingPipelines }}
- publishAssetsImmediately: ${{ parameters.publishAssetsImmediately }}
+ publishAssetsImmediately: ${{ or(parameters.publishAssetsImmediately, parameters.isAssetlessBuild) }}
+ isAssetlessBuild: ${{ parameters.isAssetlessBuild }}
enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }}
artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
signingValidationAdditionalParameters: ${{ parameters.signingValidationAdditionalParameters }}
+ repositoryAlias: ${{ parameters.repositoryAlias }}
+ officialBuildId: ${{ parameters.officialBuildId }}
diff --git a/eng/common/core-templates/jobs/source-build.yml b/eng/common/core-templates/jobs/source-build.yml
index a10ccfbee6d..d92860cba20 100644
--- a/eng/common/core-templates/jobs/source-build.yml
+++ b/eng/common/core-templates/jobs/source-build.yml
@@ -2,19 +2,13 @@ parameters:
# This template adds arcade-powered source-build to CI. A job is created for each platform, as
# well as an optional server job that completes when all platform jobs complete.
- # The name of the "join" job for all source-build platforms. If set to empty string, the job is
- # not included. Existing repo pipelines can use this job depend on all source-build jobs
- # completing without maintaining a separate list of every single job ID: just depend on this one
- # server job. By default, not included. Recommended name if used: 'Source_Build_Complete'.
- allCompletedJobId: ''
-
# See /eng/common/core-templates/job/source-build.yml
jobNamePrefix: 'Source_Build'
# This is the default platform provided by Arcade, intended for use by a managed-only repo.
defaultManagedPlatform:
name: 'Managed'
- container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream9'
+ container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream-10-amd64'
# Defines the platforms on which to run build jobs. One job is created for each platform, and the
# object in this array is sent to the job template as 'platform'. If no platforms are specified,
@@ -31,16 +25,6 @@ parameters:
jobs:
-- ${{ if ne(parameters.allCompletedJobId, '') }}:
- - job: ${{ parameters.allCompletedJobId }}
- displayName: Source-Build Complete
- pool: server
- dependsOn:
- - ${{ each platform in parameters.platforms }}:
- - ${{ parameters.jobNamePrefix }}_${{ platform.name }}
- - ${{ if eq(length(parameters.platforms), 0) }}:
- - ${{ parameters.jobNamePrefix }}_${{ parameters.defaultManagedPlatform.name }}
-
- ${{ each platform in parameters.platforms }}:
- template: /eng/common/core-templates/job/source-build.yml
parameters:
diff --git a/eng/common/core-templates/post-build/post-build.yml b/eng/common/core-templates/post-build/post-build.yml
index a8c0bd3b921..f6f87fe5c67 100644
--- a/eng/common/core-templates/post-build/post-build.yml
+++ b/eng/common/core-templates/post-build/post-build.yml
@@ -60,6 +60,11 @@ parameters:
artifactNames: ''
downloadArtifacts: true
+ - name: isAssetlessBuild
+ type: boolean
+ displayName: Is Assetless Build
+ default: false
+
# These parameters let the user customize the call to sdk-task.ps1 for publishing
# symbols & general artifacts as well as for signing validation
- name: symbolPublishingAdditionalParameters
@@ -149,7 +154,7 @@ stages:
- task: PowerShell@2
displayName: Validate
inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/nuget-validation.ps1
+ filePath: $(System.DefaultWorkingDirectory)/eng/common/post-build/nuget-validation.ps1
arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/
- job:
@@ -188,9 +193,6 @@ stages:
buildId: $(AzDOBuildId)
artifactName: PackageArtifacts
checkDownloadedFiles: true
- itemPattern: |
- **
- !**/Microsoft.SourceBuild.Intermediate.*.nupkg
# This is necessary whenever we want to publish/restore to an AzDO private feed
# Since sdk-task.ps1 tries to restore packages we need to do this authentication here
@@ -206,7 +208,7 @@ stages:
filePath: eng\common\sdk-task.ps1
arguments: -task SigningValidation -restore -msbuildEngine vs
/p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts'
- /p:SignCheckExclusionsFile='$(Build.SourcesDirectory)/eng/SignCheckExclusionsFile.txt'
+ /p:SignCheckExclusionsFile='$(System.DefaultWorkingDirectory)/eng/SignCheckExclusionsFile.txt'
${{ parameters.signingValidationAdditionalParameters }}
- template: /eng/common/core-templates/steps/publish-logs.yml
@@ -256,7 +258,7 @@ stages:
- task: PowerShell@2
displayName: Validate
inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/sourcelink-validation.ps1
+ filePath: $(System.DefaultWorkingDirectory)/eng/common/post-build/sourcelink-validation.ps1
arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/
-ExtractPath $(Agent.BuildDirectory)/Extract/
-GHRepoName $(Build.Repository.Name)
@@ -311,7 +313,7 @@ stages:
azureSubscription: "Darc: Maestro Production"
scriptType: ps
scriptLocation: scriptPath
- scriptPath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1
+ scriptPath: $(System.DefaultWorkingDirectory)/eng/common/post-build/publish-using-darc.ps1
arguments: >
-BuildId $(BARBuildId)
-PublishingInfraVersion ${{ parameters.publishingInfraVersion }}
@@ -320,3 +322,4 @@ stages:
-RequireDefaultChannels ${{ parameters.requireDefaultChannels }}
-ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}'
-SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}'
+ -SkipAssetsPublishing '${{ parameters.isAssetlessBuild }}'
diff --git a/eng/common/core-templates/post-build/setup-maestro-vars.yml b/eng/common/core-templates/post-build/setup-maestro-vars.yml
index f7602980dbe..a7abd58c4bb 100644
--- a/eng/common/core-templates/post-build/setup-maestro-vars.yml
+++ b/eng/common/core-templates/post-build/setup-maestro-vars.yml
@@ -36,7 +36,7 @@ steps:
$AzureDevOpsBuildId = $Env:Build_BuildId
}
else {
- . $(Build.SourcesDirectory)\eng\common\tools.ps1
+ . $(System.DefaultWorkingDirectory)\eng\common\tools.ps1
$darc = Get-Darc
$buildInfo = & $darc get-build `
--id ${{ parameters.BARBuildId }} `
diff --git a/eng/common/core-templates/steps/enable-internal-sources.yml b/eng/common/core-templates/steps/enable-internal-sources.yml
index 64f881bffc3..4085512b690 100644
--- a/eng/common/core-templates/steps/enable-internal-sources.yml
+++ b/eng/common/core-templates/steps/enable-internal-sources.yml
@@ -17,8 +17,8 @@ steps:
- task: PowerShell@2
displayName: Setup Internal Feeds
inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/SetupNugetSources.ps1
- arguments: -ConfigFile $(Build.SourcesDirectory)/NuGet.config -Password $Env:Token
+ filePath: $(System.DefaultWorkingDirectory)/eng/common/SetupNugetSources.ps1
+ arguments: -ConfigFile $(System.DefaultWorkingDirectory)/NuGet.config -Password $Env:Token
env:
Token: ${{ parameters.legacyCredential }}
# If running on dnceng (internal project), just use the default behavior for NuGetAuthenticate.
@@ -29,8 +29,8 @@ steps:
- task: PowerShell@2
displayName: Setup Internal Feeds
inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/SetupNugetSources.ps1
- arguments: -ConfigFile $(Build.SourcesDirectory)/NuGet.config
+ filePath: $(System.DefaultWorkingDirectory)/eng/common/SetupNugetSources.ps1
+ arguments: -ConfigFile $(System.DefaultWorkingDirectory)/NuGet.config
- ${{ else }}:
- template: /eng/common/templates/steps/get-federated-access-token.yml
parameters:
@@ -39,8 +39,8 @@ steps:
- task: PowerShell@2
displayName: Setup Internal Feeds
inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/SetupNugetSources.ps1
- arguments: -ConfigFile $(Build.SourcesDirectory)/NuGet.config -Password $(dnceng-artifacts-feeds-read-access-token)
+ filePath: $(System.DefaultWorkingDirectory)/eng/common/SetupNugetSources.ps1
+ arguments: -ConfigFile $(System.DefaultWorkingDirectory)/NuGet.config -Password $(dnceng-artifacts-feeds-read-access-token)
# This is required in certain scenarios to install the ADO credential provider.
# It is installed by default in some msbuild invocations (e.g. VS msbuild), but needs to be installed for others
# (e.g. dotnet msbuild).
diff --git a/eng/common/core-templates/steps/generate-sbom.yml b/eng/common/core-templates/steps/generate-sbom.yml
index 44a9636cdff..c05f6502797 100644
--- a/eng/common/core-templates/steps/generate-sbom.yml
+++ b/eng/common/core-templates/steps/generate-sbom.yml
@@ -6,7 +6,7 @@
parameters:
PackageVersion: 10.0.0
- BuildDropPath: '$(Build.SourcesDirectory)/artifacts'
+ BuildDropPath: '$(System.DefaultWorkingDirectory)/artifacts'
PackageName: '.NET'
ManifestDirPath: $(Build.ArtifactStagingDirectory)/sbom
IgnoreDirectories: ''
diff --git a/eng/common/core-templates/steps/get-delegation-sas.yml b/eng/common/core-templates/steps/get-delegation-sas.yml
index 9db5617ea7d..d2901470a7f 100644
--- a/eng/common/core-templates/steps/get-delegation-sas.yml
+++ b/eng/common/core-templates/steps/get-delegation-sas.yml
@@ -31,16 +31,7 @@ steps:
# Calculate the expiration of the SAS token and convert to UTC
$expiry = (Get-Date).AddHours(${{ parameters.expiryInHours }}).ToUniversalTime().ToString("yyyy-MM-ddTHH:mm:ssZ")
- # Temporarily work around a helix issue where SAS tokens with / in them will cause incorrect downloads
- # of correlation payloads. https://github.com/dotnet/dnceng/issues/3484
- $sas = ""
- do {
- $sas = az storage container generate-sas --account-name ${{ parameters.storageAccount }} --name ${{ parameters.container }} --permissions ${{ parameters.permissions }} --expiry $expiry --auth-mode login --as-user -o tsv
- if ($LASTEXITCODE -ne 0) {
- Write-Error "Failed to generate SAS token."
- exit 1
- }
- } while($sas.IndexOf('/') -ne -1)
+ $sas = az storage container generate-sas --account-name ${{ parameters.storageAccount }} --name ${{ parameters.container }} --permissions ${{ parameters.permissions }} --expiry $expiry --auth-mode login --as-user -o tsv
if ($LASTEXITCODE -ne 0) {
Write-Error "Failed to generate SAS token."
diff --git a/eng/common/core-templates/steps/install-microbuild.yml b/eng/common/core-templates/steps/install-microbuild.yml
index 2a6a529482b..d6b9878f54d 100644
--- a/eng/common/core-templates/steps/install-microbuild.yml
+++ b/eng/common/core-templates/steps/install-microbuild.yml
@@ -4,70 +4,88 @@ parameters:
# Enable install tasks for MicroBuild on Mac and Linux
# Will be ignored if 'enableMicrobuild' is false or 'Agent.Os' is 'Windows_NT'
enableMicrobuildForMacAndLinux: false
+ # Determines whether the ESRP service connection information should be passed to the signing plugin.
+ # This overlaps with _SignType to some degree. We only need the service connection for real signing.
+ # It's important that the service connection not be passed to the MicroBuildSigningPlugin task in this place.
+ # Doing so will cause the service connection to be authorized for the pipeline, which isn't allowed and won't work for non-prod.
+ # Unfortunately, _SignType can't be used to exclude the use of the service connection in non-real-sign scenarios: the
+ # variable is not available in template expressions. _SignType is used very widely across .NET, so replacing it is difficult.
+ microbuildUseESRP: true
# Location of the MicroBuild output folder
- microBuildOutputFolder: '$(Agent.TempDirectory)'
+ # NOTE: There's something that relies on this being in the "default" source directory for tasks such as Signing to work properly.
+ microBuildOutputFolder: '$(Build.SourcesDirectory)'
+
continueOnError: false
steps:
- ${{ if eq(parameters.enableMicrobuild, 'true') }}:
- ${{ if eq(parameters.enableMicrobuildForMacAndLinux, 'true') }}:
- # Install Python 3.12.x on when Python > 3.12.x is installed - https://github.com/dotnet/source-build/issues/4802
- - script: |
- version=$(python3 --version | awk '{print $2}')
- major=$(echo $version | cut -d. -f1)
- minor=$(echo $version | cut -d. -f2)
-
- installPython=false
- if [ "$major" -gt 3 ] || { [ "$major" -eq 3 ] && [ "$minor" -gt 12 ]; }; then
- installPython=true
- fi
-
- echo "Python version: $version."
- echo "Install Python 3.12.x: $installPython."
- echo "##vso[task.setvariable variable=installPython;isOutput=true]$installPython"
- name: InstallPython
- displayName: 'Determine Python installation'
- condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT'))
-
- - task: UsePythonVersion@0
- inputs:
- versionSpec: '3.12.x'
- displayName: 'Use Python 3.12.x'
- condition: and(succeeded(), eq(variables['InstallPython.installPython'], 'true'), ne(variables['Agent.Os'], 'Windows_NT'))
-
# Needed to download the MicroBuild plugin nupkgs on Mac and Linux when nuget.exe is unavailable
- task: UseDotNet@2
displayName: Install .NET 8.0 SDK for MicroBuild Plugin
inputs:
packageType: sdk
version: 8.0.x
- installationPath: ${{ parameters.microBuildOutputFolder }}/dotnet
+ installationPath: ${{ parameters.microBuildOutputFolder }}/.dotnet
workingDirectory: ${{ parameters.microBuildOutputFolder }}
condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT'))
+ - script: |
+ REM Check if ESRP is disabled while SignType is real
+ if /I "${{ parameters.microbuildUseESRP }}"=="false" if /I "$(_SignType)"=="real" (
+ echo Error: ESRP must be enabled when SignType is real.
+ exit /b 1
+ )
+ displayName: 'Validate ESRP usage (Windows)'
+ condition: and(succeeded(), eq(variables['Agent.Os'], 'Windows_NT'))
+ - script: |
+ # Check if ESRP is disabled while SignType is real
+ if [ "${{ parameters.microbuildUseESRP }}" = "false" ] && [ "$(_SignType)" = "real" ]; then
+ echo "Error: ESRP must be enabled when SignType is real."
+ exit 1
+ fi
+ displayName: 'Validate ESRP usage (Non-Windows)'
+ condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT'))
+
+ # Two different MB install steps. This is due to not being able to use the agent OS during
+ # YAML expansion, and Windows vs. Linux/Mac uses different service connections. However,
+ # we can avoid including the MB install step if not enabled at all. This avoids a bunch of
+ # extra pipeline authorizations, since most pipelines do not sign on non-Windows.
- task: MicroBuildSigningPlugin@4
- displayName: Install MicroBuild plugin
+ displayName: Install MicroBuild plugin (Windows)
inputs:
signType: $(_SignType)
zipSources: false
feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json
- ${{ if and(eq(parameters.enableMicrobuildForMacAndLinux, 'true'), ne(variables['Agent.Os'], 'Windows_NT')) }}:
- azureSubscription: 'MicroBuild Signing Task (DevDiv)'
+ ${{ if eq(parameters.microbuildUseESRP, true) }}:
+ ConnectedServiceName: 'MicroBuild Signing Task (DevDiv)'
+ ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
+ ConnectedPMEServiceName: 6cc74545-d7b9-4050-9dfa-ebefcc8961ea
+ ${{ else }}:
+ ConnectedPMEServiceName: 248d384a-b39b-46e3-8ad5-c2c210d5e7ca
env:
TeamName: $(_TeamName)
MicroBuildOutputFolderOverride: ${{ parameters.microBuildOutputFolder }}
SYSTEM_ACCESSTOKEN: $(System.AccessToken)
continueOnError: ${{ parameters.continueOnError }}
- condition: and(
- succeeded(),
- or(
- and(
- eq(variables['Agent.Os'], 'Windows_NT'),
- in(variables['_SignType'], 'real', 'test')
- ),
- and(
- ${{ eq(parameters.enableMicrobuildForMacAndLinux, true) }},
- ne(variables['Agent.Os'], 'Windows_NT'),
- eq(variables['_SignType'], 'real')
- )
- ))
+ condition: and(succeeded(), eq(variables['Agent.Os'], 'Windows_NT'), in(variables['_SignType'], 'real', 'test'))
+
+ - ${{ if eq(parameters.enableMicrobuildForMacAndLinux, true) }}:
+ - task: MicroBuildSigningPlugin@4
+ displayName: Install MicroBuild plugin (non-Windows)
+ inputs:
+ signType: $(_SignType)
+ zipSources: false
+ feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json
+ ${{ if eq(parameters.microbuildUseESRP, true) }}:
+ ConnectedServiceName: 'MicroBuild Signing Task (DevDiv)'
+ ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
+ ConnectedPMEServiceName: beb8cb23-b303-4c95-ab26-9e44bc958d39
+ ${{ else }}:
+ ConnectedPMEServiceName: c24de2a5-cc7a-493d-95e4-8e5ff5cad2bc
+ env:
+ TeamName: $(_TeamName)
+ MicroBuildOutputFolderOverride: ${{ parameters.microBuildOutputFolder }}
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+ continueOnError: ${{ parameters.continueOnError }}
+ condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT'), eq(variables['_SignType'], 'real'))
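The new `microbuildUseESRP` knob above lets a pipeline skip the ESRP service connection when only test signing is needed, while the added validation scripts still require ESRP for real signing. A minimal consumption sketch, assuming a caller that already sets `_SignType`; the surrounding step list and values are illustrative, not part of this diff:

```yaml
steps:
- template: /eng/common/core-templates/steps/install-microbuild.yml
  parameters:
    enableMicrobuild: true
    enableMicrobuildForMacAndLinux: true
    # Test signing only; no ESRP service connection is authorized for this pipeline.
    microbuildUseESRP: false
```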
diff --git a/eng/common/core-templates/steps/publish-logs.yml b/eng/common/core-templates/steps/publish-logs.yml
index de24d0087c5..10f825e270a 100644
--- a/eng/common/core-templates/steps/publish-logs.yml
+++ b/eng/common/core-templates/steps/publish-logs.yml
@@ -12,22 +12,22 @@ steps:
inputs:
targetType: inline
script: |
- New-Item -ItemType Directory $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
- Move-Item -Path $(Build.SourcesDirectory)/artifacts/log/Debug/* $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
+ New-Item -ItemType Directory $(System.DefaultWorkingDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
+ Move-Item -Path $(System.DefaultWorkingDirectory)/artifacts/log/Debug/* $(System.DefaultWorkingDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
continueOnError: true
condition: always()
- task: PowerShell@2
displayName: Redact Logs
inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/redact-logs.ps1
+ filePath: $(System.DefaultWorkingDirectory)/eng/common/post-build/redact-logs.ps1
# For now this needs to have an explicit list of all sensitive data. Taken from eng/publishing/v3/publish.yml
- # Sensitive data can as well be added to $(Build.SourcesDirectory)/eng/BinlogSecretsRedactionFile.txt'
+ # Sensitive data can as well be added to $(System.DefaultWorkingDirectory)/eng/BinlogSecretsRedactionFile.txt'
# If the file exists - sensitive data for redaction will be sourced from it
# (single entry per line, lines starting with '# ' are considered comments and skipped)
- arguments: -InputPath '$(Build.SourcesDirectory)/PostBuildLogs'
+ arguments: -InputPath '$(System.DefaultWorkingDirectory)/PostBuildLogs'
-BinlogToolVersion ${{parameters.BinlogToolVersion}}
- -TokensFilePath '$(Build.SourcesDirectory)/eng/BinlogSecretsRedactionFile.txt'
+ -TokensFilePath '$(System.DefaultWorkingDirectory)/eng/BinlogSecretsRedactionFile.txt'
'$(publishing-dnceng-devdiv-code-r-build-re)'
'$(MaestroAccessToken)'
'$(dn-bot-all-orgs-artifact-feeds-rw)'
@@ -44,7 +44,7 @@ steps:
- task: CopyFiles@2
displayName: Gather post build logs
inputs:
- SourceFolder: '$(Build.SourcesDirectory)/PostBuildLogs'
+ SourceFolder: '$(System.DefaultWorkingDirectory)/PostBuildLogs'
Contents: '**'
TargetFolder: '$(Build.ArtifactStagingDirectory)/PostBuildLogs'
condition: always()
diff --git a/eng/common/core-templates/steps/source-build.yml b/eng/common/core-templates/steps/source-build.yml
index 2341706b066..acf16ed3496 100644
--- a/eng/common/core-templates/steps/source-build.yml
+++ b/eng/common/core-templates/steps/source-build.yml
@@ -19,19 +19,6 @@ steps:
set -x
df -h
- # If file changes are detected, set CopyWipIntoInnerSourceBuildRepo to copy the WIP changes into the inner source build repo.
- internalRestoreArgs=
- if ! git diff --quiet; then
- internalRestoreArgs='/p:CopyWipIntoInnerSourceBuildRepo=true'
- # The 'Copy WIP' feature of source build uses git stash to apply changes from the original repo.
- # This only works if there is a username/email configured, which won't be the case in most CI runs.
- git config --get user.email
- if [ $? -ne 0 ]; then
- git config user.email dn-bot@microsoft.com
- git config user.name dn-bot
- fi
- fi
-
# If building on the internal project, the internal storage variable may be available (usually only if needed)
# In that case, add variables to allow the download of internal runtimes if the specified versions are not found
# in the default public locations.
@@ -46,36 +33,11 @@ steps:
buildConfig='$(_BuildConfig)'
fi
- officialBuildArgs=
- if [ '${{ and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}' = 'True' ]; then
- officialBuildArgs='/p:DotNetPublishUsingPipelines=true /p:OfficialBuildId=$(BUILD.BUILDNUMBER)'
- fi
-
targetRidArgs=
if [ '${{ parameters.platform.targetRID }}' != '' ]; then
targetRidArgs='/p:TargetRid=${{ parameters.platform.targetRID }}'
fi
- runtimeOsArgs=
- if [ '${{ parameters.platform.runtimeOS }}' != '' ]; then
- runtimeOsArgs='/p:RuntimeOS=${{ parameters.platform.runtimeOS }}'
- fi
-
- baseOsArgs=
- if [ '${{ parameters.platform.baseOS }}' != '' ]; then
- baseOsArgs='/p:BaseOS=${{ parameters.platform.baseOS }}'
- fi
-
- publishArgs=
- if [ '${{ parameters.platform.skipPublishValidation }}' != 'true' ]; then
- publishArgs='--publish'
- fi
-
- assetManifestFileName=SourceBuild_RidSpecific.xml
- if [ '${{ parameters.platform.name }}' != '' ]; then
- assetManifestFileName=SourceBuild_${{ parameters.platform.name }}.xml
- fi
-
portableBuildArgs=
if [ '${{ parameters.platform.portableBuild }}' != '' ]; then
portableBuildArgs='/p:PortableBuild=${{ parameters.platform.portableBuild }}'
@@ -83,51 +45,21 @@ steps:
${{ coalesce(parameters.platform.buildScript, './build.sh') }} --ci \
--configuration $buildConfig \
- --restore --build --pack $publishArgs -bl \
- $officialBuildArgs \
+ --restore --build --pack -bl \
+ --source-build \
+ ${{ parameters.platform.buildArguments }} \
$internalRuntimeDownloadArgs \
- $internalRestoreArgs \
$targetRidArgs \
- $runtimeOsArgs \
- $baseOsArgs \
$portableBuildArgs \
- /p:DotNetBuildSourceOnly=true \
- /p:DotNetBuildRepo=true \
- /p:AssetManifestFileName=$assetManifestFileName
displayName: Build
-# Upload build logs for diagnosis.
-- task: CopyFiles@2
- displayName: Prepare BuildLogs staging directory
- inputs:
- SourceFolder: '$(Build.SourcesDirectory)'
- Contents: |
- **/*.log
- **/*.binlog
- artifacts/sb/prebuilt-report/**
- TargetFolder: '$(Build.StagingDirectory)/BuildLogs'
- CleanTargetFolder: true
- continueOnError: true
- condition: succeededOrFailed()
-
- template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
args:
displayName: Publish BuildLogs
- targetPath: '$(Build.StagingDirectory)/BuildLogs'
+ targetPath: artifacts/log/${{ coalesce(variables._BuildConfig, 'Release') }}
artifactName: BuildLogs_SourceBuild_${{ parameters.platform.name }}_Attempt$(System.JobAttempt)
continueOnError: true
condition: succeededOrFailed()
sbomEnabled: false # we don't need SBOM for logs
-
-# Manually inject component detection so that we can ignore the source build upstream cache, which contains
-# a nupkg cache of input packages (a local feed).
-# This path must match the upstream cache path in property 'CurrentRepoSourceBuiltNupkgCacheDir'
-# in src\Microsoft.DotNet.Arcade.Sdk\tools\SourceBuild\SourceBuildArcade.targets
-- template: /eng/common/core-templates/steps/component-governance.yml
- parameters:
- displayName: Component Detection (Exclude upstream cache)
- is1ESPipeline: ${{ parameters.is1ESPipeline }}
- componentGovernanceIgnoreDirectories: '$(Build.SourcesDirectory)/artifacts/sb/src/artifacts/obj/source-built-upstream-cache'
- disableComponentGovernance: ${{ eq(variables['System.TeamProject'], 'public') }}
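With the per-property switches removed above, callers now feed repo-specific MSBuild properties through `platform.buildArguments`. A sketch of a platform object under that assumption; the property values shown previously lived in the deleted `officialBuildArgs` block and are only illustrative here:

```yaml
platform:
  name: linux_x64
  targetRID: linux-x64
  portableBuild: true
  buildArguments: '/p:OfficialBuildId=$(BUILD.BUILDNUMBER) /p:DotNetPublishUsingPipelines=true'
```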
diff --git a/eng/common/core-templates/steps/source-index-stage1-publish.yml b/eng/common/core-templates/steps/source-index-stage1-publish.yml
index 473a22c4719..e9a694afa58 100644
--- a/eng/common/core-templates/steps/source-index-stage1-publish.yml
+++ b/eng/common/core-templates/steps/source-index-stage1-publish.yml
@@ -1,15 +1,15 @@
parameters:
- sourceIndexUploadPackageVersion: 2.0.0-20240522.1
- sourceIndexProcessBinlogPackageVersion: 1.0.1-20240522.1
+ sourceIndexUploadPackageVersion: 2.0.0-20250818.1
+ sourceIndexProcessBinlogPackageVersion: 1.0.1-20250818.1
sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json
binlogPath: artifacts/log/Debug/Build.binlog
steps:
- task: UseDotNet@2
- displayName: "Source Index: Use .NET 8 SDK"
+ displayName: "Source Index: Use .NET 9 SDK"
inputs:
packageType: sdk
- version: 8.0.x
+ version: 9.0.x
installationPath: $(Agent.TempDirectory)/dotnet
workingDirectory: $(Agent.TempDirectory)
@@ -20,7 +20,7 @@ steps:
# Set working directory to temp directory so 'dotnet' doesn't try to use global.json and use the repo's sdk.
workingDirectory: $(Agent.TempDirectory)
-- script: $(Agent.TempDirectory)/.source-index/tools/BinLogToSln -i ${{parameters.BinlogPath}} -r $(Build.SourcesDirectory) -n $(Build.Repository.Name) -o .source-index/stage1output
+- script: $(Agent.TempDirectory)/.source-index/tools/BinLogToSln -i ${{parameters.BinlogPath}} -r $(System.DefaultWorkingDirectory) -n $(Build.Repository.Name) -o .source-index/stage1output
displayName: "Source Index: Process Binlog into indexable sln"
- ${{ if and(ne(parameters.runAsPublic, 'true'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
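Consumers that index a non-Debug build can point the template at their own binlog instead of the default `artifacts/log/Debug/Build.binlog`; a sketch, with the Release path being an assumption rather than part of this change:

```yaml
- template: /eng/common/core-templates/steps/source-index-stage1-publish.yml
  parameters:
    binlogPath: artifacts/log/Release/Build.binlog
```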
diff --git a/eng/common/cross/arm64/tizen/tizen.patch b/eng/common/cross/arm64/tizen/tizen.patch
index af7c8be0590..2cebc547382 100644
--- a/eng/common/cross/arm64/tizen/tizen.patch
+++ b/eng/common/cross/arm64/tizen/tizen.patch
@@ -5,5 +5,5 @@ diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so
Use the shared library, but some functions are only in
the static library, so try that secondarily. */
OUTPUT_FORMAT(elf64-littleaarch64)
--GROUP ( /lib64/libc.so.6 /usr/lib64/libc_nonshared.a AS_NEEDED ( /lib/ld-linux-aarch64.so.1 ) )
+-GROUP ( /lib64/libc.so.6 /usr/lib64/libc_nonshared.a AS_NEEDED ( /lib64/ld-linux-aarch64.so.1 ) )
+GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux-aarch64.so.1 ) )
diff --git a/eng/common/cross/armel/armel.jessie.patch b/eng/common/cross/armel/armel.jessie.patch
deleted file mode 100644
index 2d261561935..00000000000
--- a/eng/common/cross/armel/armel.jessie.patch
+++ /dev/null
@@ -1,43 +0,0 @@
-diff -u -r a/usr/include/urcu/uatomic/generic.h b/usr/include/urcu/uatomic/generic.h
---- a/usr/include/urcu/uatomic/generic.h 2014-10-22 15:00:58.000000000 -0700
-+++ b/usr/include/urcu/uatomic/generic.h 2020-10-30 21:38:28.550000000 -0700
-@@ -69,10 +69,10 @@
- #endif
- #ifdef UATOMIC_HAS_ATOMIC_SHORT
- case 2:
-- return __sync_val_compare_and_swap_2(addr, old, _new);
-+ return __sync_val_compare_and_swap_2((uint16_t*) addr, old, _new);
- #endif
- case 4:
-- return __sync_val_compare_and_swap_4(addr, old, _new);
-+ return __sync_val_compare_and_swap_4((uint32_t*) addr, old, _new);
- #if (CAA_BITS_PER_LONG == 64)
- case 8:
- return __sync_val_compare_and_swap_8(addr, old, _new);
-@@ -109,7 +109,7 @@
- return;
- #endif
- case 4:
-- __sync_and_and_fetch_4(addr, val);
-+ __sync_and_and_fetch_4((uint32_t*) addr, val);
- return;
- #if (CAA_BITS_PER_LONG == 64)
- case 8:
-@@ -148,7 +148,7 @@
- return;
- #endif
- case 4:
-- __sync_or_and_fetch_4(addr, val);
-+ __sync_or_and_fetch_4((uint32_t*) addr, val);
- return;
- #if (CAA_BITS_PER_LONG == 64)
- case 8:
-@@ -187,7 +187,7 @@
- return __sync_add_and_fetch_2(addr, val);
- #endif
- case 4:
-- return __sync_add_and_fetch_4(addr, val);
-+ return __sync_add_and_fetch_4((uint32_t*) addr, val);
- #if (CAA_BITS_PER_LONG == 64)
- case 8:
- return __sync_add_and_fetch_8(addr, val);
diff --git a/eng/common/cross/build-rootfs.sh b/eng/common/cross/build-rootfs.sh
index 74f399716ba..8abfb71f727 100755
--- a/eng/common/cross/build-rootfs.sh
+++ b/eng/common/cross/build-rootfs.sh
@@ -164,9 +164,13 @@ while :; do
armel)
__BuildArch=armel
__UbuntuArch=armel
- __UbuntuRepo="http://ftp.debian.org/debian/"
- __CodeName=jessie
+ __UbuntuRepo="http://archive.debian.org/debian/"
+ __CodeName=buster
__KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg"
+ __LLDB_Package="liblldb-6.0-dev"
+ __UbuntuPackages="${__UbuntuPackages// libomp-dev/}"
+ __UbuntuPackages="${__UbuntuPackages// libomp5/}"
+ __UbuntuSuites=
;;
armv6)
__BuildArch=armv6
@@ -278,44 +282,21 @@ while :; do
;;
xenial) # Ubuntu 16.04
- if [[ "$__CodeName" != "jessie" ]]; then
- __CodeName=xenial
- fi
- ;;
- zesty) # Ubuntu 17.04
- if [[ "$__CodeName" != "jessie" ]]; then
- __CodeName=zesty
- fi
+ __CodeName=xenial
;;
bionic) # Ubuntu 18.04
- if [[ "$__CodeName" != "jessie" ]]; then
- __CodeName=bionic
- fi
+ __CodeName=bionic
;;
focal) # Ubuntu 20.04
- if [[ "$__CodeName" != "jessie" ]]; then
- __CodeName=focal
- fi
+ __CodeName=focal
;;
jammy) # Ubuntu 22.04
- if [[ "$__CodeName" != "jessie" ]]; then
- __CodeName=jammy
- fi
+ __CodeName=jammy
;;
noble) # Ubuntu 24.04
- if [[ "$__CodeName" != "jessie" ]]; then
- __CodeName=noble
- fi
- if [[ -n "$__LLDB_Package" ]]; then
- __LLDB_Package="liblldb-18-dev"
- fi
- ;;
- jessie) # Debian 8
- __CodeName=jessie
- __KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg"
-
- if [[ -z "$__UbuntuRepo" ]]; then
- __UbuntuRepo="http://ftp.debian.org/debian/"
+ __CodeName=noble
+ if [[ -z "$__LLDB_Package" ]]; then
+ __LLDB_Package="liblldb-19-dev"
fi
;;
stretch) # Debian 9
@@ -333,7 +314,7 @@ while :; do
__KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg"
if [[ -z "$__UbuntuRepo" ]]; then
- __UbuntuRepo="http://ftp.debian.org/debian/"
+ __UbuntuRepo="http://archive.debian.org/debian/"
fi
;;
bullseye) # Debian 11
@@ -473,10 +454,6 @@ if [[ "$__AlpineVersion" =~ 3\.1[345] ]]; then
__AlpinePackages="${__AlpinePackages/compiler-rt/compiler-rt-static}"
fi
-if [[ "$__BuildArch" == "armel" ]]; then
- __LLDB_Package="lldb-3.5-dev"
-fi
-
__UbuntuPackages+=" ${__LLDB_Package:-}"
if [[ -z "$__UbuntuRepo" ]]; then
@@ -850,12 +827,6 @@ EOF
if [[ "$__SkipUnmount" == "0" ]]; then
umount "$__RootfsDir"/* || true
fi
-
- if [[ "$__BuildArch" == "armel" && "$__CodeName" == "jessie" ]]; then
- pushd "$__RootfsDir"
- patch -p1 < "$__CrossDir/$__BuildArch/armel.jessie.patch"
- popd
- fi
elif [[ "$__Tizen" == "tizen" ]]; then
ROOTFS_DIR="$__RootfsDir" "$__CrossDir/tizen-build-rootfs.sh" "$__BuildArch"
else
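After this change an armel rootfs defaults to Debian buster pulled from archive.debian.org, so the jessie-specific patching above is gone. A hypothetical invocation; the pipeline step wrapper is not part of this diff:

```yaml
- script: ./eng/common/cross/build-rootfs.sh armel
  displayName: Build armel rootfs (Debian buster)
```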
diff --git a/eng/common/cross/tizen-fetch.sh b/eng/common/cross/tizen-fetch.sh
index 28936ceef3a..37c3a61f1de 100755
--- a/eng/common/cross/tizen-fetch.sh
+++ b/eng/common/cross/tizen-fetch.sh
@@ -156,13 +156,8 @@ fetch_tizen_pkgs()
done
}
-if [ "$TIZEN_ARCH" == "riscv64" ]; then
- BASE="Tizen-Base-RISCV"
- UNIFIED="Tizen-Unified-RISCV"
-else
- BASE="Tizen-Base"
- UNIFIED="Tizen-Unified"
-fi
+BASE="Tizen-Base"
+UNIFIED="Tizen-Unified"
Inform "Initialize ${TIZEN_ARCH} base"
fetch_tizen_pkgs_init standard $BASE
diff --git a/eng/common/darc-init.sh b/eng/common/darc-init.sh
index 36dbd45e1ce..e889f439b8d 100755
--- a/eng/common/darc-init.sh
+++ b/eng/common/darc-init.sh
@@ -68,7 +68,7 @@ function InstallDarcCli {
fi
fi
- local arcadeServicesSource="https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json"
+ local arcadeServicesSource="https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json"
echo "Installing Darc CLI version $darcVersion..."
echo "You may need to restart your command shell if this is the first dotnet tool you have installed."
diff --git a/eng/common/dotnet.cmd b/eng/common/dotnet.cmd
new file mode 100644
index 00000000000..527fa4bb38f
--- /dev/null
+++ b/eng/common/dotnet.cmd
@@ -0,0 +1,7 @@
+@echo off
+
+:: This script is used to install the .NET SDK.
+:: It will also invoke the SDK with any provided arguments.
+
+powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0dotnet.ps1""" %*"
+exit /b %ErrorLevel%
diff --git a/eng/common/dotnet.ps1 b/eng/common/dotnet.ps1
new file mode 100644
index 00000000000..45e5676c9eb
--- /dev/null
+++ b/eng/common/dotnet.ps1
@@ -0,0 +1,11 @@
+# This script is used to install the .NET SDK.
+# It will also invoke the SDK with any provided arguments.
+
+. $PSScriptRoot\tools.ps1
+$dotnetRoot = InitializeDotNetCli -install:$true
+
+# Invoke acquired SDK with args if they are provided
+if ($args.count -gt 0) {
+ $env:DOTNET_NOLOGO=1
+ & "$dotnetRoot\dotnet.exe" $args
+}
diff --git a/eng/common/dotnet.sh b/eng/common/dotnet.sh
new file mode 100644
index 00000000000..2ef68235675
--- /dev/null
+++ b/eng/common/dotnet.sh
@@ -0,0 +1,26 @@
+#!/usr/bin/env bash
+
+# This script is used to install the .NET SDK.
+# It will also invoke the SDK with any provided arguments.
+
+source="${BASH_SOURCE[0]}"
+# resolve $SOURCE until the file is no longer a symlink
+while [[ -h $source ]]; do
+ scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+ source="$(readlink "$source")"
+
+ # if $source was a relative symlink, we need to resolve it relative to the path where the
+ # symlink file was located
+ [[ $source != /* ]] && source="$scriptroot/$source"
+done
+scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+
+source $scriptroot/tools.sh
+InitializeDotNetCli true # install
+
+# Invoke acquired SDK with args if they are provided
+if [[ $# > 0 ]]; then
+ __dotnetDir=${_InitializeDotNetCli}
+ dotnetPath=${__dotnetDir}/dotnet
+ ${dotnetPath} "$@"
+fi
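The new `dotnet.cmd`/`dotnet.ps1`/`dotnet.sh` wrappers install the repo-pinned SDK via `tools.ps1`/`tools.sh` and then forward any arguments to it. A usage sketch from a pipeline step; the argument shown is just a placeholder:

```yaml
- script: ./eng/common/dotnet.sh --info
  displayName: Install pinned SDK and print info
```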
diff --git a/eng/common/generate-locproject.ps1 b/eng/common/generate-locproject.ps1
index 524aaa57f2b..fa1cdc2b300 100644
--- a/eng/common/generate-locproject.ps1
+++ b/eng/common/generate-locproject.ps1
@@ -33,15 +33,27 @@ $jsonTemplateFiles | ForEach-Object {
$jsonWinformsTemplateFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "en\\strings\.json" } # current winforms pattern
+$wxlFilesV3 = @()
+$wxlFilesV5 = @()
$wxlFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "\\.+\.wxl" -And -Not( $_.Directory.Name -Match "\d{4}" ) } # localized files live in four digit lang ID directories; this excludes them
if (-not $wxlFiles) {
$wxlEnFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "\\1033\\.+\.wxl" } # pick up en files (1033 = en) specifically so we can copy them to use as the neutral xlf files
if ($wxlEnFiles) {
- $wxlFiles = @()
- $wxlEnFiles | ForEach-Object {
- $destinationFile = "$($_.Directory.Parent.FullName)\$($_.Name)"
- $wxlFiles += Copy-Item "$($_.FullName)" -Destination $destinationFile -PassThru
- }
+ $wxlFiles = @()
+ $wxlEnFiles | ForEach-Object {
+ $destinationFile = "$($_.Directory.Parent.FullName)\$($_.Name)"
+ $content = Get-Content $_.FullName -Raw
+
+ # Split files on schema to select different parser settings in the generated project.
+ if ($content -like "*http://wixtoolset.org/schemas/v4/wxl*")
+ {
+ $wxlFilesV5 += Copy-Item $_.FullName -Destination $destinationFile -PassThru
+ }
+ elseif ($content -like "*http://schemas.microsoft.com/wix/2006/localization*")
+ {
+ $wxlFilesV3 += Copy-Item $_.FullName -Destination $destinationFile -PassThru
+ }
+ }
}
}
@@ -114,7 +126,32 @@ $locJson = @{
CloneLanguageSet = "WiX_CloneLanguages"
LssFiles = @( "wxl_loc.lss" )
LocItems = @(
- $wxlFiles | ForEach-Object {
+ $wxlFilesV3 | ForEach-Object {
+ $outputPath = "$($_.Directory.FullName | Resolve-Path -Relative)\"
+ $continue = $true
+ foreach ($exclusion in $exclusions.Exclusions) {
+ if ($_.FullName.Contains($exclusion)) {
+ $continue = $false
+ }
+ }
+ $sourceFile = ($_.FullName | Resolve-Path -Relative)
+ if ($continue)
+ {
+ return @{
+ SourceFile = $sourceFile
+ CopyOption = "LangIDOnPath"
+ OutputPath = $outputPath
+ }
+ }
+ }
+ )
+ },
+ @{
+ LanguageSet = $LanguageSet
+ CloneLanguageSet = "WiX_CloneLanguages"
+ LssFiles = @( "P210WxlSchemaV4.lss" )
+ LocItems = @(
+ $wxlFilesV5 | ForEach-Object {
$outputPath = "$($_.Directory.FullName | Resolve-Path -Relative)\"
$continue = $true
foreach ($exclusion in $exclusions.Exclusions) {
diff --git a/eng/common/internal/NuGet.config b/eng/common/internal/NuGet.config
index 19d3d311b16..f70261ed689 100644
--- a/eng/common/internal/NuGet.config
+++ b/eng/common/internal/NuGet.config
@@ -4,4 +4,7 @@
+
+
+
diff --git a/eng/common/native/install-dependencies.sh b/eng/common/native/install-dependencies.sh
index ce661e9e5cd..477a44f335b 100644
--- a/eng/common/native/install-dependencies.sh
+++ b/eng/common/native/install-dependencies.sh
@@ -27,8 +27,9 @@ case "$os" in
libssl-dev libkrb5-dev pigz cpio
localedef -i en_US -c -f UTF-8 -A /usr/share/locale/locale.alias en_US.UTF-8
- elif [ "$ID" = "fedora" ] || [ "$ID" = "rhel" ]; then
- dnf install -y cmake llvm lld lldb clang python curl libicu-devel openssl-devel krb5-devel lttng-ust-devel pigz cpio
+ elif [ "$ID" = "fedora" ] || [ "$ID" = "rhel" ] || [ "$ID" = "azurelinux" ]; then
+ pkg_mgr="$(command -v tdnf 2>/dev/null || command -v dnf)"
+ $pkg_mgr install -y cmake llvm lld lldb clang python curl libicu-devel openssl-devel krb5-devel lttng-ust-devel pigz cpio
elif [ "$ID" = "alpine" ]; then
apk add build-base cmake bash curl clang llvm-dev lld lldb krb5-dev lttng-ust-dev icu-dev openssl-dev pigz cpio
else
@@ -44,7 +45,7 @@ case "$os" in
export HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK=1
# Skip brew update for now, see https://github.com/actions/setup-python/issues/577
# brew update --preinstall
- brew bundle --no-upgrade --no-lock --file=- <<EOF
+ brew bundle --no-upgrade --file=- <<EOF
diff --git a/eng/common/sdk-task.ps1 b/eng/common/sdk-task.ps1
--- a/eng/common/sdk-task.ps1
+++ b/eng/common/sdk-task.ps1
  Write-Host " -msbuildEngine <value> Msbuild engine to use to run build ('dotnet', 'vs', or unspecified)."
+ Write-Host " -excludeCIBinaryLog When running on CI, allow no binary log (short: -nobl)"
Write-Host ""
Write-Host "Command line arguments not listed above are passed thru to msbuild."
}
@@ -34,10 +37,11 @@ function Print-Usage() {
function Build([string]$target) {
$logSuffix = if ($target -eq 'Execute') { '' } else { ".$target" }
$log = Join-Path $LogDir "$task$logSuffix.binlog"
+ $binaryLogArg = if ($binaryLog) { "/bl:$log" } else { "" }
$outputPath = Join-Path $ToolsetDir "$task\"
MSBuild $taskProject `
- /bl:$log `
+ $binaryLogArg `
/t:$target `
/p:Configuration=$configuration `
/p:RepoRoot=$RepoRoot `
diff --git a/eng/common/sdk-task.sh b/eng/common/sdk-task.sh
index b9b9e58db9a..3270f83fa9a 100644
--- a/eng/common/sdk-task.sh
+++ b/eng/common/sdk-task.sh
@@ -7,6 +7,11 @@ show_usage() {
echo " --verbosity Msbuild verbosity: q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic]"
echo " --help Print help and exit"
echo ""
+
+ echo "Advanced settings:"
+ echo " --excludeCIBinarylog Don't output binary log (short: -nobl)"
+ echo " --noWarnAsError Do not warn as error"
+ echo ""
echo "Command line arguments not listed above are passed thru to msbuild."
}
@@ -27,10 +32,12 @@ Build() {
local log_suffix=""
[[ "$target" != "Execute" ]] && log_suffix=".$target"
local log="$log_dir/$task$log_suffix.binlog"
+ local binaryLogArg=""
+ [[ $binary_log == true ]] && binaryLogArg="/bl:$log"
local output_path="$toolset_dir/$task/"
MSBuild "$taskProject" \
- /bl:"$log" \
+ $binaryLogArg \
/t:"$target" \
/p:Configuration="$configuration" \
/p:RepoRoot="$repo_root" \
@@ -39,11 +46,14 @@ Build() {
$properties
}
+binary_log=true
configuration="Debug"
verbosity="minimal"
+exclude_ci_binary_log=false
restore=false
help=false
properties=''
+warnAsError=true
while (($# > 0)); do
lowerI="$(echo $1 | tr "[:upper:]" "[:lower:]")"
@@ -60,6 +70,15 @@ while (($# > 0)); do
verbosity=$2
shift 2
;;
+ --excludecibinarylog|--nobl)
+ binary_log=false
+ exclude_ci_binary_log=true
+ shift 1
+ ;;
+ --noWarnAsError)
+ warnAsError=false
+ shift 1
+ ;;
--help)
help=true
shift 1
@@ -72,8 +91,6 @@ while (($# > 0)); do
done
ci=true
-binaryLog=true
-warnAsError=true
if $help; then
show_usage
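Both task runners now accept a `-nobl` shorthand to skip the binary log on CI, and the shell version additionally gains `--noWarnAsError`. An invocation sketch; the task name is a placeholder, not something introduced by this diff:

```yaml
- script: ./eng/common/sdk-task.sh --task SomeSdkTask --nobl
  displayName: Run an SDK task without a binlog
```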
diff --git a/eng/common/sdl/packages.config b/eng/common/sdl/packages.config
index 4585cfd6bba..e5f543ea68c 100644
--- a/eng/common/sdl/packages.config
+++ b/eng/common/sdl/packages.config
@@ -1,4 +1,4 @@
-
+
diff --git a/eng/common/template-guidance.md b/eng/common/template-guidance.md
index 98bbc1ded0b..4bf4cf41bd7 100644
--- a/eng/common/template-guidance.md
+++ b/eng/common/template-guidance.md
@@ -50,7 +50,7 @@ extends:
- task: CopyFiles@2
displayName: Gather build output
inputs:
- SourceFolder: '$(Build.SourcesDirectory)/artifacts/marvel'
+ SourceFolder: '$(System.DefaultWorkingDirectory)/artifacts/marvel'
Contents: '**'
TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/marvel'
```
diff --git a/eng/common/templates-official/job/job.yml b/eng/common/templates-official/job/job.yml
index 817555505aa..92a0664f564 100644
--- a/eng/common/templates-official/job/job.yml
+++ b/eng/common/templates-official/job/job.yml
@@ -3,7 +3,7 @@ parameters:
enableSbom: true
runAsPublic: false
PackageVersion: 9.0.0
- BuildDropPath: '$(Build.SourcesDirectory)/artifacts'
+ BuildDropPath: '$(System.DefaultWorkingDirectory)/artifacts'
jobs:
- template: /eng/common/core-templates/job/job.yml
@@ -31,6 +31,7 @@ jobs:
PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts'
ArtifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }}
condition: always()
+ retryCountOnTaskFailure: 10 # for any logs being locked
continueOnError: true
- ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}:
- output: pipelineArtifact
@@ -39,6 +40,7 @@ jobs:
displayName: 'Publish logs'
continueOnError: true
condition: always()
+ retryCountOnTaskFailure: 10 # for any logs being locked
sbomEnabled: false # we don't need SBOM for logs
- ${{ if eq(parameters.enablePublishBuildArtifacts, true) }}:
@@ -46,7 +48,7 @@ jobs:
displayName: Publish Logs
PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts/log/$(_BuildConfig)'
publishLocation: Container
- ArtifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)' ) }}
+ ArtifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)_Attempt$(System.JobAttempt)' ) }}
continueOnError: true
condition: always()
sbomEnabled: false # we don't need SBOM for logs
diff --git a/eng/common/templates-official/steps/publish-build-artifacts.yml b/eng/common/templates-official/steps/publish-build-artifacts.yml
index 100a3fc9849..fcf6637b2eb 100644
--- a/eng/common/templates-official/steps/publish-build-artifacts.yml
+++ b/eng/common/templates-official/steps/publish-build-artifacts.yml
@@ -24,6 +24,10 @@ parameters:
- name: is1ESPipeline
type: boolean
default: true
+
+- name: retryCountOnTaskFailure
+ type: string
+ default: 10
steps:
- ${{ if ne(parameters.is1ESPipeline, true) }}:
@@ -38,4 +42,5 @@ steps:
PathtoPublish: ${{ parameters.pathToPublish }}
${{ if parameters.artifactName }}:
ArtifactName: ${{ parameters.artifactName }}
-
+ ${{ if parameters.retryCountOnTaskFailure }}:
+ retryCountOnTaskFailure: ${{ parameters.retryCountOnTaskFailure }}
diff --git a/eng/common/templates-official/variables/sdl-variables.yml b/eng/common/templates-official/variables/sdl-variables.yml
index dbdd66d4a4b..f1311bbb1b3 100644
--- a/eng/common/templates-official/variables/sdl-variables.yml
+++ b/eng/common/templates-official/variables/sdl-variables.yml
@@ -4,4 +4,4 @@ variables:
- name: DefaultGuardianVersion
value: 0.109.0
- name: GuardianPackagesConfigFile
- value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config
\ No newline at end of file
+ value: $(System.DefaultWorkingDirectory)\eng\common\sdl\packages.config
\ No newline at end of file
diff --git a/eng/common/templates/job/job.yml b/eng/common/templates/job/job.yml
index d1aeb92fcea..238fa0818f7 100644
--- a/eng/common/templates/job/job.yml
+++ b/eng/common/templates/job/job.yml
@@ -6,7 +6,7 @@ parameters:
enableSbom: true
runAsPublic: false
PackageVersion: 9.0.0
- BuildDropPath: '$(Build.SourcesDirectory)/artifacts'
+ BuildDropPath: '$(System.DefaultWorkingDirectory)/artifacts'
jobs:
- template: /eng/common/core-templates/job/job.yml
@@ -46,6 +46,7 @@ jobs:
artifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }}
continueOnError: true
condition: always()
+ retryCountOnTaskFailure: 10 # for any logs being locked
- ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}:
- template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
parameters:
@@ -56,6 +57,7 @@ jobs:
displayName: 'Publish logs'
continueOnError: true
condition: always()
+ retryCountOnTaskFailure: 10 # for any logs being locked
sbomEnabled: false # we don't need SBOM for logs
- ${{ if ne(parameters.enablePublishBuildArtifacts, 'false') }}:
@@ -66,7 +68,7 @@ jobs:
displayName: Publish Logs
pathToPublish: '$(Build.ArtifactStagingDirectory)/artifacts/log/$(_BuildConfig)'
publishLocation: Container
- artifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)' ) }}
+ artifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)_Attempt$(System.JobAttempt)' ) }}
continueOnError: true
condition: always()
@@ -75,7 +77,7 @@ jobs:
parameters:
is1ESPipeline: false
args:
- targetPath: '$(Build.SourcesDirectory)\eng\common\BuildConfiguration'
+ targetPath: '$(System.DefaultWorkingDirectory)\eng\common\BuildConfiguration'
artifactName: 'BuildConfiguration'
displayName: 'Publish build retry configuration'
continueOnError: true
diff --git a/eng/common/templates/steps/publish-build-artifacts.yml b/eng/common/templates/steps/publish-build-artifacts.yml
index 6428a98dfef..605e602e94d 100644
--- a/eng/common/templates/steps/publish-build-artifacts.yml
+++ b/eng/common/templates/steps/publish-build-artifacts.yml
@@ -25,6 +25,10 @@ parameters:
type: string
default: 'Container'
+- name: retryCountOnTaskFailure
+ type: string
+ default: 10
+
steps:
- ${{ if eq(parameters.is1ESPipeline, true) }}:
- 'eng/common/templates cannot be referenced from a 1ES managed template': error
@@ -37,4 +41,6 @@ steps:
PublishLocation: ${{ parameters.publishLocation }}
PathtoPublish: ${{ parameters.pathToPublish }}
${{ if parameters.artifactName }}:
- ArtifactName: ${{ parameters.artifactName }}
\ No newline at end of file
+ ArtifactName: ${{ parameters.artifactName }}
+ ${{ if parameters.retryCountOnTaskFailure }}:
+ retryCountOnTaskFailure: ${{ parameters.retryCountOnTaskFailure }}
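The new `retryCountOnTaskFailure` parameter defaults to 10 but can be overridden per publish step; a sketch reusing the values already present in `job.yml` above:

```yaml
- template: /eng/common/templates/steps/publish-build-artifacts.yml
  parameters:
    is1ESPipeline: false
    pathToPublish: '$(Build.ArtifactStagingDirectory)/artifacts/log/$(_BuildConfig)'
    artifactName: '$(Agent.Os)_$(Agent.JobName)_Attempt$(System.JobAttempt)'
    retryCountOnTaskFailure: 3   # lower than the new default of 10
```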
diff --git a/eng/common/templates/steps/vmr-sync.yml b/eng/common/templates/steps/vmr-sync.yml
new file mode 100644
index 00000000000..599afb6186b
--- /dev/null
+++ b/eng/common/templates/steps/vmr-sync.yml
@@ -0,0 +1,207 @@
+### These steps synchronize new code from product repositories into the VMR (https://github.com/dotnet/dotnet).
+### They initialize the darc CLI and pull the new updates.
+### Changes are applied locally onto the already cloned VMR (located in $vmrPath).
+
+parameters:
+- name: targetRef
displayName: Target revision in dotnet/<repo> to synchronize
+ type: string
+ default: $(Build.SourceVersion)
+
+- name: vmrPath
+ displayName: Path where the dotnet/dotnet is checked out to
+ type: string
+ default: $(Agent.BuildDirectory)/vmr
+
+- name: additionalSyncs
+ displayName: Optional list of package names whose repo's source will also be synchronized in the local VMR, e.g. NuGet.Protocol
+ type: object
+ default: []
+
+steps:
+- checkout: vmr
+ displayName: Clone dotnet/dotnet
+ path: vmr
+ clean: true
+
+- checkout: self
+ displayName: Clone $(Build.Repository.Name)
+ path: repo
+ fetchDepth: 0
+
+# This step is needed so that when we get a detached HEAD / shallow clone,
+# we still pull the commit into the temporary repo clone to use it during the sync.
+# Also unshallow the clone so that the forward-flow command works.
+- script: |
+ git branch repo-head
+ git rev-parse HEAD
+ displayName: Label PR commit
+ workingDirectory: $(Agent.BuildDirectory)/repo
+
+- script: |
+ vmr_sha=$(grep -oP '(?<=Sha=")[^"]*' $(Agent.BuildDirectory)/repo/eng/Version.Details.xml)
+ echo "##vso[task.setvariable variable=vmr_sha]$vmr_sha"
+ displayName: Obtain the vmr sha from Version.Details.xml (Unix)
+ condition: ne(variables['Agent.OS'], 'Windows_NT')
+ workingDirectory: $(Agent.BuildDirectory)/repo
+
+- powershell: |
+ [xml]$xml = Get-Content -Path $(Agent.BuildDirectory)/repo/eng/Version.Details.xml
+ $vmr_sha = $xml.SelectSingleNode("//Source").Sha
+ Write-Output "##vso[task.setvariable variable=vmr_sha]$vmr_sha"
+ displayName: Obtain the vmr sha from Version.Details.xml (Windows)
+ condition: eq(variables['Agent.OS'], 'Windows_NT')
+ workingDirectory: $(Agent.BuildDirectory)/repo
+
+- script: |
+ git fetch --all
+ git checkout $(vmr_sha)
+ displayName: Checkout VMR at correct sha for repo flow
+ workingDirectory: ${{ parameters.vmrPath }}
+
+- script: |
+ git config --global user.name "dotnet-maestro[bot]"
+ git config --global user.email "dotnet-maestro[bot]@users.noreply.github.com"
+ displayName: Set git author to dotnet-maestro[bot]
+ workingDirectory: ${{ parameters.vmrPath }}
+
+- script: |
+ ./eng/common/vmr-sync.sh \
+ --vmr ${{ parameters.vmrPath }} \
+ --tmp $(Agent.TempDirectory) \
+ --azdev-pat '$(dn-bot-all-orgs-code-r)' \
+ --ci \
+ --debug
+
+ if [ "$?" -ne 0 ]; then
+ echo "##vso[task.logissue type=error]Failed to synchronize the VMR"
+ exit 1
+ fi
+ displayName: Sync repo into VMR (Unix)
+ condition: ne(variables['Agent.OS'], 'Windows_NT')
+ workingDirectory: $(Agent.BuildDirectory)/repo
+
+- script: |
+ git config --global diff.astextplain.textconv echo
+ git config --system core.longpaths true
+ displayName: Configure Windows git (longpaths, astextplain)
+ condition: eq(variables['Agent.OS'], 'Windows_NT')
+
+- powershell: |
+ ./eng/common/vmr-sync.ps1 `
+ -vmr ${{ parameters.vmrPath }} `
+ -tmp $(Agent.TempDirectory) `
+ -azdevPat '$(dn-bot-all-orgs-code-r)' `
+ -ci `
+ -debugOutput
+
+ if ($LASTEXITCODE -ne 0) {
+ echo "##vso[task.logissue type=error]Failed to synchronize the VMR"
+ exit 1
+ }
+ displayName: Sync repo into VMR (Windows)
+ condition: eq(variables['Agent.OS'], 'Windows_NT')
+ workingDirectory: $(Agent.BuildDirectory)/repo
+
+- ${{ if eq(variables['Build.Reason'], 'PullRequest') }}:
+ - task: CopyFiles@2
+ displayName: Collect failed patches
+ condition: failed()
+ inputs:
+ SourceFolder: '$(Agent.TempDirectory)'
+ Contents: '*.patch'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/FailedPatches'
+
+ - publish: '$(Build.ArtifactStagingDirectory)/FailedPatches'
+ artifact: $(System.JobDisplayName)_FailedPatches
+ displayName: Upload failed patches
+ condition: failed()
+
+- ${{ each assetName in parameters.additionalSyncs }}:
+ # The vmr-sync script ends up staging files in the local VMR so we have to commit those
+ - script:
+ git commit --allow-empty -am "Forward-flow $(Build.Repository.Name)"
+ displayName: Commit local VMR changes
+ workingDirectory: ${{ parameters.vmrPath }}
+
+ - script: |
+ set -ex
+
+ echo "Searching for details of asset ${{ assetName }}..."
+
+ # Use darc to get dependencies information
+ dependencies=$(./.dotnet/dotnet darc get-dependencies --name '${{ assetName }}' --ci)
+
+ # Extract repository URL and commit hash
+ repository=$(echo "$dependencies" | grep 'Repo:' | sed 's/Repo:[[:space:]]*//' | head -1)
+
+ if [ -z "$repository" ]; then
+ echo "##vso[task.logissue type=error]Asset ${{ assetName }} not found in the dependency list"
+ exit 1
+ fi
+
+ commit=$(echo "$dependencies" | grep 'Commit:' | sed 's/Commit:[[:space:]]*//' | head -1)
+
+ echo "Updating the VMR from $repository / $commit..."
+ cd ..
+ git clone $repository ${{ assetName }}
+ cd ${{ assetName }}
+ git checkout $commit
+ git branch "sync/$commit"
+
+ ./eng/common/vmr-sync.sh \
+ --vmr ${{ parameters.vmrPath }} \
+ --tmp $(Agent.TempDirectory) \
+ --azdev-pat '$(dn-bot-all-orgs-code-r)' \
+ --ci \
+ --debug
+
+ if [ "$?" -ne 0 ]; then
+ echo "##vso[task.logissue type=error]Failed to synchronize the VMR"
+ exit 1
+ fi
+ displayName: Sync ${{ assetName }} into the VMR (Unix)
+ condition: ne(variables['Agent.OS'], 'Windows_NT')
+ workingDirectory: $(Agent.BuildDirectory)/repo
+
+ - powershell: |
+ $ErrorActionPreference = 'Stop'
+
+ Write-Host "Searching for details of asset ${{ assetName }}..."
+
+ $dependencies = .\.dotnet\dotnet darc get-dependencies --name '${{ assetName }}' --ci
+
+ $repository = $dependencies | Select-String -Pattern 'Repo:\s+([^\s]+)' | Select-Object -First 1
+ $repository -match 'Repo:\s+([^\s]+)' | Out-Null
+ $repository = $matches[1]
+
+ if ($repository -eq $null) {
+ Write-Error "Asset ${{ assetName }} not found in the dependency list"
+ exit 1
+ }
+
+ $commit = $dependencies | Select-String -Pattern 'Commit:\s+([^\s]+)' | Select-Object -First 1
+ $commit -match 'Commit:\s+([^\s]+)' | Out-Null
+ $commit = $matches[1]
+
+ Write-Host "Updating the VMR from $repository / $commit..."
+ cd ..
+ git clone $repository ${{ assetName }}
+ cd ${{ assetName }}
+ git checkout $commit
+ git branch "sync/$commit"
+
+ .\eng\common\vmr-sync.ps1 `
+ -vmr ${{ parameters.vmrPath }} `
+ -tmp $(Agent.TempDirectory) `
+ -azdevPat '$(dn-bot-all-orgs-code-r)' `
+ -ci `
+ -debugOutput
+
+ if ($LASTEXITCODE -ne 0) {
+ echo "##vso[task.logissue type=error]Failed to synchronize the VMR"
+ exit 1
+ }
+ displayName: Sync ${{ assetName }} into the VMR (Windows)
+ condition: eq(variables['Agent.OS'], 'Windows_NT')
+ workingDirectory: $(Agent.BuildDirectory)/repo
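A pipeline consuming these steps needs the `vmr` repository resource that the `checkout: vmr` step expects (the pipeline template below shows that declaration). A job sketch under those assumptions; the job name and pool are illustrative:

```yaml
- job: Synchronize_VMR
  steps:
  - template: /eng/common/templates/steps/vmr-sync.yml
    parameters:
      vmrPath: $(Agent.BuildDirectory)/vmr
      additionalSyncs:
      - NuGet.Protocol
```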
diff --git a/eng/common/templates/vmr-build-pr.yml b/eng/common/templates/vmr-build-pr.yml
new file mode 100644
index 00000000000..ce3c29a62fa
--- /dev/null
+++ b/eng/common/templates/vmr-build-pr.yml
@@ -0,0 +1,42 @@
+# This pipeline is used for running the VMR verification of the PR changes in repo-level PRs.
+#
+# It will run a full set of verification jobs defined in:
+# https://github.com/dotnet/dotnet/blob/10060d128e3f470e77265f8490f5e4f72dae738e/eng/pipelines/templates/stages/vmr-build.yml#L27-L38
+#
+# For repos that do not need to run the full set, you would do the following:
+#
+# 1. Copy this YML file to a repo-specific location, i.e. outside of eng/common.
+#
+# 2. Add `verifications` parameter to VMR template reference
+#
+# Examples:
+# - For source-build stage 1 verification, add the following:
+# verifications: [ "source-build-stage1" ]
+#
+# - For Windows only verifications, add the following:
+# verifications: [ "unified-build-windows-x64", "unified-build-windows-x86" ]
+
+trigger: none
+pr: none
+
+variables:
+- template: /eng/common/templates/variables/pool-providers.yml@self
+
+- name: skipComponentGovernanceDetection # we run CG on internal builds only
+ value: true
+
+- name: Codeql.Enabled # we run CodeQL on internal builds only
+ value: false
+
+resources:
+ repositories:
+ - repository: vmr
+ type: github
+ name: dotnet/dotnet
+ endpoint: dotnet
+
+stages:
+- template: /eng/pipelines/templates/stages/vmr-build.yml@vmr
+ parameters:
+ isBuiltFromVmr: false
+ scope: lite
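Per the header comments, a repo that only needs a subset of verifications would copy this file outside `eng/common` and add the `verifications` parameter; the stage reference would then look like this sketch:

```yaml
stages:
- template: /eng/pipelines/templates/stages/vmr-build.yml@vmr
  parameters:
    isBuiltFromVmr: false
    scope: lite
    verifications: [ "source-build-stage1" ]
```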
diff --git a/eng/common/tools.ps1 b/eng/common/tools.ps1
index 95ccdf82e4b..06b44de7870 100644
--- a/eng/common/tools.ps1
+++ b/eng/common/tools.ps1
@@ -65,10 +65,8 @@ $ErrorActionPreference = 'Stop'
# Base-64 encoded SAS token that has permission to storage container described by $runtimeSourceFeed
[string]$runtimeSourceFeedKey = if (Test-Path variable:runtimeSourceFeedKey) { $runtimeSourceFeedKey } else { $null }
-# True if the build is a product build
-[bool]$productBuild = if (Test-Path variable:productBuild) { $productBuild } else { $false }
-
-[String[]]$properties = if (Test-Path variable:properties) { $properties } else { @() }
+# True when the build is running within the VMR.
+[bool]$fromVMR = if (Test-Path variable:fromVMR) { $fromVMR } else { $false }
function Create-Directory ([string[]] $path) {
New-Item -Path $path -Force -ItemType 'Directory' | Out-Null
@@ -259,10 +257,23 @@ function Retry($downloadBlock, $maxRetries = 5) {
function GetDotNetInstallScript([string] $dotnetRoot) {
$installScript = Join-Path $dotnetRoot 'dotnet-install.ps1'
+ $shouldDownload = $false
+
if (!(Test-Path $installScript)) {
+ $shouldDownload = $true
+ } else {
+ # Check if the script is older than 30 days
+ $fileAge = (Get-Date) - (Get-Item $installScript).LastWriteTime
+ if ($fileAge.Days -gt 30) {
+ Write-Host "Existing install script is too old, re-downloading..."
+ $shouldDownload = $true
+ }
+ }
+
+ if ($shouldDownload) {
Create-Directory $dotnetRoot
$ProgressPreference = 'SilentlyContinue' # Don't display the console progress UI - it's a huge perf hit
- $uri = "https://builds.dotnet.microsoft.com/dotnet/scripts/v1/dotnet-install.ps1"
+ $uri = "https://builds.dotnet.microsoft.com/dotnet/scripts/$dotnetInstallScriptVersion/dotnet-install.ps1"
Retry({
Write-Host "GET $uri"
@@ -416,7 +427,7 @@ function InitializeVisualStudioMSBuild([bool]$install, [object]$vsRequirements =
# Locate Visual Studio installation or download x-copy msbuild.
$vsInfo = LocateVisualStudio $vsRequirements
- if ($vsInfo -ne $null) {
+ if ($vsInfo -ne $null -and $env:ForceUseXCopyMSBuild -eq $null) {
# Ensure vsInstallDir has a trailing slash
$vsInstallDir = Join-Path $vsInfo.installationPath "\"
$vsMajorVersion = $vsInfo.installationVersion.Split('.')[0]
@@ -533,7 +544,8 @@ function LocateVisualStudio([object]$vsRequirements = $null){
if (Get-Member -InputObject $GlobalJson.tools -Name 'vswhere') {
$vswhereVersion = $GlobalJson.tools.vswhere
} else {
- $vswhereVersion = '2.5.2'
+ # keep this in sync with the VSWhereVersion in DefaultVersions.props
+ $vswhereVersion = '3.1.7'
}
$vsWhereDir = Join-Path $ToolsDir "vswhere\$vswhereVersion"
@@ -541,7 +553,8 @@ function LocateVisualStudio([object]$vsRequirements = $null){
if (!(Test-Path $vsWhereExe)) {
Create-Directory $vsWhereDir
- Write-Host 'Downloading vswhere'
+ Write-Host "Downloading vswhere $vswhereVersion"
+ $ProgressPreference = 'SilentlyContinue' # Don't display the console progress UI - it's a huge perf hit
Retry({
Invoke-WebRequest "https://netcorenativeassets.blob.core.windows.net/resource-packages/external/windows/vswhere/$vswhereVersion/vswhere.exe" -OutFile $vswhereExe
})
@@ -646,7 +659,6 @@ function GetNuGetPackageCachePath() {
$env:NUGET_PACKAGES = Join-Path $env:UserProfile '.nuget\packages\'
} else {
$env:NUGET_PACKAGES = Join-Path $RepoRoot '.packages\'
- $env:RESTORENOHTTPCACHE = $true
}
}
@@ -768,28 +780,13 @@ function MSBuild() {
$toolsetBuildProject = InitializeToolset
$basePath = Split-Path -parent $toolsetBuildProject
- $possiblePaths = @(
- # new scripts need to work with old packages, so we need to look for the old names/versions
- (Join-Path $basePath (Join-Path $buildTool.Framework 'Microsoft.DotNet.ArcadeLogging.dll')),
- (Join-Path $basePath (Join-Path $buildTool.Framework 'Microsoft.DotNet.Arcade.Sdk.dll')),
-
- # This list doesn't need to be updated anymore and can eventually be removed.
- (Join-Path $basePath (Join-Path net9.0 'Microsoft.DotNet.ArcadeLogging.dll')),
- (Join-Path $basePath (Join-Path net9.0 'Microsoft.DotNet.Arcade.Sdk.dll')),
- (Join-Path $basePath (Join-Path net8.0 'Microsoft.DotNet.ArcadeLogging.dll')),
- (Join-Path $basePath (Join-Path net8.0 'Microsoft.DotNet.Arcade.Sdk.dll'))
- )
- $selectedPath = $null
- foreach ($path in $possiblePaths) {
- if (Test-Path $path -PathType Leaf) {
- $selectedPath = $path
- break
- }
- }
+ $selectedPath = Join-Path $basePath (Join-Path $buildTool.Framework 'Microsoft.DotNet.ArcadeLogging.dll')
+
- if (-not $selectedPath) {
+ if (-not (Test-Path $selectedPath -PathType Leaf)) {
- Write-PipelineTelemetryError -Category 'Build' -Message 'Unable to find arcade sdk logger assembly.'
+ Write-PipelineTelemetryError -Category 'Build' -Message "Unable to find arcade sdk logger assembly: $selectedPath"
ExitWithExitCode 1
}
+
$args += "/logger:$selectedPath"
}
@@ -852,8 +849,8 @@ function MSBuild-Core() {
}
# When running on Azure Pipelines, override the returned exit code to avoid double logging.
- # Skip this when the build is a child of the VMR orchestrator build.
- if ($ci -and $env:SYSTEM_TEAMPROJECT -ne $null -and !$productBuild -and -not($properties -like "*DotNetBuildRepo=true*")) {
+ # Skip this when the build is a child of the VMR build.
+ if ($ci -and $env:SYSTEM_TEAMPROJECT -ne $null -and !$fromVMR) {
Write-PipelineSetResult -Result "Failed" -Message "msbuild execution failed."
# Exiting with an exit code causes the azure pipelines task to log yet another "noise" error
# The above Write-PipelineSetResult will cause the task to be marked as failure without adding yet another error
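The `ForceUseXCopyMSBuild` escape hatch added above lets a build bypass a locally installed Visual Studio and use the x-copy MSBuild toolset instead. A hypothetical pipeline step showing the idea; the build script and flags are standard Arcade entry points, not part of this diff:

```yaml
- powershell: eng\common\build.ps1 -restore -build -ci
  displayName: Build with x-copy MSBuild
  env:
    # Any non-null value skips the Visual Studio MSBuild path per the check above.
    ForceUseXCopyMSBuild: true
```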
diff --git a/eng/common/tools.sh b/eng/common/tools.sh
index 4a5fa99478d..c1841c9dfd0 100755
--- a/eng/common/tools.sh
+++ b/eng/common/tools.sh
@@ -5,6 +5,9 @@
# CI mode - set to true on CI server for PR validation build or official build.
ci=${ci:-false}
+# Build mode
+source_build=${source_build:-false}
+
# Set to true to use the pipelines logger which will enable Azure logging output.
# https://github.com/Microsoft/azure-pipelines-tasks/blob/master/docs/authoring/commands.md
# This flag is meant as a temporary opt-out for the feature while we validate it across
@@ -58,7 +61,8 @@ use_installed_dotnet_cli=${use_installed_dotnet_cli:-true}
dotnetInstallScriptVersion=${dotnetInstallScriptVersion:-'v1'}
# True to use global NuGet cache instead of restoring packages to repository-local directory.
-if [[ "$ci" == true ]]; then
+# Keep in sync with NuGetPackageroot in Arcade SDK's RepositoryLayout.props.
+if [[ "$ci" == true || "$source_build" == true ]]; then
use_global_nuget_cache=${use_global_nuget_cache:-false}
else
use_global_nuget_cache=${use_global_nuget_cache:-true}
@@ -68,8 +72,8 @@ fi
runtime_source_feed=${runtime_source_feed:-''}
runtime_source_feed_key=${runtime_source_feed_key:-''}
-# True if the build is a product build
-product_build=${product_build:-false}
+# True when the build is running within the VMR.
+from_vmr=${from_vmr:-false}
# Resolve any symlinks in the given path.
function ResolvePath {
@@ -295,9 +299,30 @@ function with_retries {
function GetDotNetInstallScript {
local root=$1
local install_script="$root/dotnet-install.sh"
- local install_script_url="https://builds.dotnet.microsoft.com/dotnet/scripts/v1/dotnet-install.sh"
+ local install_script_url="https://builds.dotnet.microsoft.com/dotnet/scripts/$dotnetInstallScriptVersion/dotnet-install.sh"
+ local timestamp_file="$root/.dotnet-install.timestamp"
+ local should_download=false
if [[ ! -a "$install_script" ]]; then
+ should_download=true
+ elif [[ -f "$timestamp_file" ]]; then
+ # Check if the script is older than 30 days using timestamp file
+ local download_time=$(cat "$timestamp_file" 2>/dev/null || echo "0")
+ local current_time=$(date +%s)
+ local age_seconds=$((current_time - download_time))
+
+ # 30 days = 30 * 24 * 60 * 60 = 2592000 seconds
+ if [[ $age_seconds -gt 2592000 ]]; then
+ echo "Existing install script is too old, re-downloading..."
+ should_download=true
+ fi
+ else
+ # No timestamp file exists, assume script is old and re-download
+ echo "No timestamp found for existing install script, re-downloading..."
+ should_download=true
+ fi
+
+ if [[ "$should_download" == true ]]; then
mkdir -p "$root"
echo "Downloading '$install_script_url'"
@@ -324,6 +349,9 @@ function GetDotNetInstallScript {
ExitWithExitCode $exit_code
}
fi
+
+ # Create timestamp file to track download time in seconds from epoch
+ date +%s > "$timestamp_file"
fi
# return value
_GetDotNetInstallScript="$install_script"
@@ -341,14 +369,12 @@ function InitializeBuildTool {
_InitializeBuildToolCommand="msbuild"
}
-# Set RestoreNoHttpCache as a workaround for https://github.com/NuGet/Home/issues/3116
function GetNuGetPackageCachePath {
if [[ -z ${NUGET_PACKAGES:-} ]]; then
if [[ "$use_global_nuget_cache" == true ]]; then
export NUGET_PACKAGES="$HOME/.nuget/packages/"
else
export NUGET_PACKAGES="$repo_root/.packages/"
- export RESTORENOHTTPCACHE=true
fi
fi
@@ -445,27 +471,13 @@ function MSBuild {
fi
local toolset_dir="${_InitializeToolset%/*}"
- # new scripts need to work with old packages, so we need to look for the old names/versions
- local selectedPath=
- local possiblePaths=()
- possiblePaths+=( "$toolset_dir/net/Microsoft.DotNet.ArcadeLogging.dll" )
- possiblePaths+=( "$toolset_dir/net/Microsoft.DotNet.Arcade.Sdk.dll" )
-
- # This list doesn't need to be updated anymore and can eventually be removed.
- possiblePaths+=( "$toolset_dir/net9.0/Microsoft.DotNet.ArcadeLogging.dll" )
- possiblePaths+=( "$toolset_dir/net9.0/Microsoft.DotNet.Arcade.Sdk.dll" )
- possiblePaths+=( "$toolset_dir/net8.0/Microsoft.DotNet.ArcadeLogging.dll" )
- possiblePaths+=( "$toolset_dir/net8.0/Microsoft.DotNet.Arcade.Sdk.dll" )
- for path in "${possiblePaths[@]}"; do
- if [[ -f $path ]]; then
- selectedPath=$path
- break
- fi
- done
+ local selectedPath="$toolset_dir/net/Microsoft.DotNet.ArcadeLogging.dll"
+
if [[ -z "$selectedPath" ]]; then
- Write-PipelineTelemetryError -category 'Build' "Unable to find arcade sdk logger assembly."
+ Write-PipelineTelemetryError -category 'Build' "Unable to find arcade sdk logger assembly: $selectedPath"
ExitWithExitCode 1
fi
+
args+=( "-logger:$selectedPath" )
fi
@@ -502,8 +514,8 @@ function MSBuild-Core {
echo "Build failed with exit code $exit_code. Check errors above."
# When running on Azure Pipelines, override the returned exit code to avoid double logging.
- # Skip this when the build is a child of the VMR orchestrator build.
- if [[ "$ci" == true && -n ${SYSTEM_TEAMPROJECT:-} && "$product_build" != true && "$properties" != *"DotNetBuildRepo=true"* ]]; then
+ # Skip this when the build is a child of the VMR build.
+ if [[ "$ci" == true && -n ${SYSTEM_TEAMPROJECT:-} && "$from_vmr" != true ]]; then
Write-PipelineSetResult -result "Failed" -message "msbuild execution failed."
# Exiting with an exit code causes the azure pipelines task to log yet another "noise" error
# The above Write-PipelineSetResult will cause the task to be marked as failure without adding yet another error
@@ -526,6 +538,7 @@ function GetDarc {
fi
"$eng_root/common/darc-init.sh" --toolpath "$darc_path" $version
+ darc_tool="$darc_path/darc"
}
# Returns a full path to an Arcade SDK task project file.
diff --git a/eng/common/vmr-sync.ps1 b/eng/common/vmr-sync.ps1
new file mode 100644
index 00000000000..97302f3205b
--- /dev/null
+++ b/eng/common/vmr-sync.ps1
@@ -0,0 +1,138 @@
+<#
+.SYNOPSIS
+
+This script is used for synchronizing the current repository into a local VMR.
+It pulls the current repository's code into the specified VMR directory for local testing or
+Source-Build validation.
+
+.DESCRIPTION
+
+The tooling used for synchronization will clone the VMR repository into a temporary folder if
+it does not already exist. These clones can be reused in future synchronizations, so it is
+recommended to dedicate a folder for this to speed up re-runs.
+
+.EXAMPLE
+ Synchronize current repository into a local VMR:
+ ./vmr-sync.ps1 -vmrDir "$HOME/repos/dotnet" -tmpDir "$HOME/repos/tmp"
+
+.PARAMETER tmpDir
+Required. Path to the temporary folder where repositories will be cloned
+
+.PARAMETER vmrBranch
+Optional. Branch of the 'dotnet/dotnet' repo to synchronize. The VMR will be checked out to this branch
+
+.PARAMETER azdevPat
+Optional. Azure DevOps PAT to use for cloning private repositories.
+
+.PARAMETER vmrDir
+Optional. Path to the dotnet/dotnet repository. When not specified, the VMR is cloned into the temporary folder
+
+.PARAMETER debugOutput
+Optional. Enables debug logging in the darc vmr command.
+
+.PARAMETER ci
+Optional. Denotes that the script is running in a CI environment.
+#>
+param (
+ [Parameter(Mandatory=$true, HelpMessage="Path to the temporary folder where repositories will be cloned")]
+ [string][Alias('t', 'tmp')]$tmpDir,
+ [string][Alias('b', 'branch')]$vmrBranch,
+ [string]$remote,
+ [string]$azdevPat,
+ [string][Alias('v', 'vmr')]$vmrDir,
+ [switch]$ci,
+ [switch]$debugOutput
+)
+
+function Fail {
+ Write-Host "> $($args[0])" -ForegroundColor 'Red'
+}
+
+function Highlight {
+ Write-Host "> $($args[0])" -ForegroundColor 'Cyan'
+}
+
+$verbosity = 'verbose'
+if ($debugOutput) {
+ $verbosity = 'debug'
+}
+# Validation
+
+if (-not $tmpDir) {
+ Fail "Missing -tmpDir argument. Please specify the path to the temporary folder where the repositories will be cloned"
+ exit 1
+}
+
+# Sanitize the input
+
+if (-not $vmrDir) {
+ $vmrDir = Join-Path $tmpDir 'dotnet'
+}
+
+if (-not (Test-Path -Path $tmpDir -PathType Container)) {
+ New-Item -ItemType Directory -Path $tmpDir | Out-Null
+}
+
+# Prepare the VMR
+
+if (-not (Test-Path -Path $vmrDir -PathType Container)) {
+ Highlight "Cloning 'dotnet/dotnet' into $vmrDir.."
+ git clone https://github.com/dotnet/dotnet $vmrDir
+
+ if ($vmrBranch) {
+ git -C $vmrDir switch -c $vmrBranch
+ }
+}
+else {
+  git -C $vmrDir diff --quiet
+  if ($LASTEXITCODE -ne 0) {
+ Fail "There are changes in the working tree of $vmrDir. Please commit or stash your changes"
+ exit 1
+ }
+
+ if ($vmrBranch) {
+ Highlight "Preparing $vmrDir"
+ git -C $vmrDir checkout $vmrBranch
+ git -C $vmrDir pull
+ }
+}
+
+Set-StrictMode -Version Latest
+
+# Prepare darc
+
+Highlight 'Installing .NET, preparing the tooling..'
+. .\eng\common\tools.ps1
+$dotnetRoot = InitializeDotNetCli -install:$true
+$darc = Get-Darc
+$dotnet = "$dotnetRoot\dotnet.exe"
+
+Highlight "Starting the synchronization of VMR.."
+
+# Synchronize the VMR
+$darcArgs = (
+ "vmr", "forwardflow",
+ "--tmp", $tmpDir,
+ "--$verbosity",
+ $vmrDir
+)
+
+if ($ci) {
+ $darcArgs += ("--ci")
+}
+
+if ($azdevPat) {
+ $darcArgs += ("--azdev-pat", $azdevPat)
+}
+
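+# Illustrative only: with the defaults this resolves to roughly
+#   darc vmr forwardflow --tmp <tmpDir> --verbose <vmrDir>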
+& "$darc" $darcArgs
+
+if ($LASTEXITCODE -eq 0) {
+ Highlight "Synchronization succeeded"
+}
+else {
+ Fail "Synchronization of repo to VMR failed!"
+ Fail "'$vmrDir' is left in its last state (re-run of this script will reset it)."
+  Fail "Please inspect the logs, which contain the path to the failing patch file (use -debugOutput to get all the details)."
+ Fail "Once you make changes to the conflicting VMR patch, commit it locally and re-run this script."
+ exit 1
+}
diff --git a/eng/common/vmr-sync.sh b/eng/common/vmr-sync.sh
new file mode 100644
index 00000000000..44239e331c0
--- /dev/null
+++ b/eng/common/vmr-sync.sh
@@ -0,0 +1,207 @@
+#!/bin/bash
+
+### This script is used for synchronizing the current repository into a local VMR.
+### It pulls the current repository's code into the specified VMR directory for local testing or
+### Source-Build validation.
+###
+### The tooling used for synchronization will clone the VMR repository into a temporary folder if
+### it does not already exist. These clones can be reused in future synchronizations, so it is
+### recommended to dedicate a folder for this to speed up re-runs.
+###
+### USAGE:
+### Synchronize current repository into a local VMR:
+### ./vmr-sync.sh --tmp "$HOME/repos/tmp" "$HOME/repos/dotnet"
+###
+### Options:
+### -t, --tmp, --tmp-dir PATH
+### Required. Path to the temporary folder where repositories will be cloned
+###
+### -b, --branch, --vmr-branch BRANCH_NAME
+### Optional. Branch of the 'dotnet/dotnet' repo to synchronize. The VMR will be checked out to this branch
+###
+### --debug
+### Optional. Turns on the most verbose logging for the VMR tooling
+###
+### --remote name:URI
+### Optional. Additional remote to use during the synchronization
+### This can be used to synchronize to a commit from a fork of the repository
+### Example: 'runtime:https://github.com/yourfork/runtime'
+###
+### --azdev-pat
+### Optional. Azure DevOps PAT to use for cloning private repositories.
+###
+### -v, --vmr, --vmr-dir PATH
+###   Optional. Path to the dotnet/dotnet repository. When not specified, the VMR is cloned into the temporary folder
+
+source="${BASH_SOURCE[0]}"
+
+# resolve $source until the file is no longer a symlink
+while [[ -h "$source" ]]; do
+ scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+ source="$(readlink "$source")"
+ # if $source was a relative symlink, we need to resolve it relative to the path where the
+ # symlink file was located
+ [[ $source != /* ]] && source="$scriptroot/$source"
+done
+scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+
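+# Prints the '### ' help block at the top of this file as the usage text.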
+function print_help () {
+ sed -n '/^### /,/^$/p' "$source" | cut -b 5-
+}
+
+COLOR_RED=$(tput setaf 1 2>/dev/null || true)
+COLOR_CYAN=$(tput setaf 6 2>/dev/null || true)
+COLOR_CLEAR=$(tput sgr0 2>/dev/null || true)
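+# Sentinel token: fail() and highlight() replace it with their own color, so callers
+# can embed it in a message without breaking that message's coloring.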
+COLOR_RESET=uniquesearchablestring
+FAILURE_PREFIX='> '
+
+function fail () {
+ echo "${COLOR_RED}$FAILURE_PREFIX${1//${COLOR_RESET}/${COLOR_RED}}${COLOR_CLEAR}" >&2
+}
+
+function highlight () {
+ echo "${COLOR_CYAN}$FAILURE_PREFIX${1//${COLOR_RESET}/${COLOR_CYAN}}${COLOR_CLEAR}"
+}
+
+tmp_dir=''
+vmr_dir=''
+vmr_branch=''
+additional_remotes=''
+verbosity=verbose
+azdev_pat=''
+ci=false
+
+while [[ $# -gt 0 ]]; do
+ opt="$(echo "$1" | tr "[:upper:]" "[:lower:]")"
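+  # Match options case-insensitively using the lower-cased copy above.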
+ case "$opt" in
+ -t|--tmp|--tmp-dir)
+ tmp_dir=$2
+ shift
+ ;;
+ -v|--vmr|--vmr-dir)
+ vmr_dir=$2
+ shift
+ ;;
+ -b|--branch|--vmr-branch)
+ vmr_branch=$2
+ shift
+ ;;
+ --remote)
+ additional_remotes="$additional_remotes $2"
+ shift
+ ;;
+ --azdev-pat)
+ azdev_pat=$2
+ shift
+ ;;
+ --ci)
+ ci=true
+ ;;
+ -d|--debug)
+ verbosity=debug
+ ;;
+ -h|--help)
+ print_help
+ exit 0
+ ;;
+ *)
+ fail "Invalid argument: $1"
+ print_help
+ exit 1
+ ;;
+ esac
+
+ shift
+done
+
+# Validation
+
+if [[ -z "$tmp_dir" ]]; then
+ fail "Missing --tmp-dir argument. Please specify the path to the temporary folder where the repositories will be cloned"
+ exit 1
+fi
+
+# Sanitize the input
+
+if [[ -z "$vmr_dir" ]]; then
+ vmr_dir="$tmp_dir/dotnet"
+fi
+
+if [[ ! -d "$tmp_dir" ]]; then
+ mkdir -p "$tmp_dir"
+fi
+
+if [[ "$verbosity" == "debug" ]]; then
+ set -x
+fi
+
+# Prepare the VMR
+
+if [[ ! -d "$vmr_dir" ]]; then
+ highlight "Cloning 'dotnet/dotnet' into $vmr_dir.."
+ git clone https://github.com/dotnet/dotnet "$vmr_dir"
+
+ if [[ -n "$vmr_branch" ]]; then
+ git -C "$vmr_dir" switch -c "$vmr_branch"
+ fi
+else
+ if ! git -C "$vmr_dir" diff --quiet; then
+ fail "There are changes in the working tree of $vmr_dir. Please commit or stash your changes"
+ exit 1
+ fi
+
+ if [[ -n "$vmr_branch" ]]; then
+ highlight "Preparing $vmr_dir"
+ git -C "$vmr_dir" checkout "$vmr_branch"
+ git -C "$vmr_dir" pull
+ fi
+fi
+
+set -e
+
+# Prepare darc
+
+highlight 'Installing .NET, preparing the tooling..'
+source "./eng/common/tools.sh"
+InitializeDotNetCli true
+GetDarc
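+# InitializeDotNetCli is expected to install the SDK into the repo-local .dotnet folder; resolve its absolute path for DOTNET_ROOT.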
+dotnetDir=$( cd ./.dotnet/; pwd -P )
+dotnet=$dotnetDir/dotnet
+
+highlight "Starting the synchronization of VMR.."
+set +e
+
+if [[ -n "$additional_remotes" ]]; then
+ additional_remotes="--additional-remotes $additional_remotes"
+fi
+
+if [[ -n "$azdev_pat" ]]; then
+ azdev_pat="--azdev-pat $azdev_pat"
+fi
+
+ci_arg=''
+if [[ "$ci" == "true" ]]; then
+ ci_arg="--ci"
+fi
+
+# Synchronize the VMR
+
+export DOTNET_ROOT="$dotnetDir"
+
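+# Note: $azdev_pat, $ci_arg and $additional_remotes are expanded unquoted so that empty
+# values disappear and multi-word values split into separate arguments.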
+"$darc_tool" vmr forwardflow \
+ --tmp "$tmp_dir" \
+ $azdev_pat \
+ --$verbosity \
+ $ci_arg \
+ $additional_remotes \
+ "$vmr_dir"
+
+if [[ $? == 0 ]]; then
+ highlight "Synchronization succeeded"
+else
+ fail "Synchronization of repo to VMR failed!"
+ fail "'$vmr_dir' is left in its last state (re-run of this script will reset it)."
+  fail "Please inspect the logs, which contain the path to the failing patch file (use --debug to get all the details)."
+ fail "Once you make changes to the conflicting VMR patch, commit it locally and re-run this script."
+ exit 1
+fi
diff --git a/global.json b/global.json
index 219831980cc..f3f6675c728 100644
--- a/global.json
+++ b/global.json
@@ -1,9 +1,9 @@
{
"sdk": {
- "version": "10.0.100-preview.3.25125.5"
+ "version": "10.0.100-rc.1.25451.107"
},
"tools": {
- "dotnet": "10.0.100-preview.3.25125.5",
+ "dotnet": "10.0.100-rc.1.25451.107",
"runtimes": {
"dotnet/x64": [
"8.0.0",
@@ -20,7 +20,7 @@
"msbuild-sdks": {
"Microsoft.Build.NoTargets": "3.7.0",
"Microsoft.Build.Traversal": "3.2.0",
- "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25126.4",
- "Microsoft.DotNet.Helix.Sdk": "10.0.0-beta.25126.4"
+ "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25512.102",
+ "Microsoft.DotNet.Helix.Sdk": "10.0.0-beta.25512.102"
}
}