diff --git a/eng/Versions.props b/eng/Versions.props
index d05ddae5d..4701dd1a3 100644
--- a/eng/Versions.props
+++ b/eng/Versions.props
@@ -158,7 +158,7 @@
removed. See https://github.com/dotnet/source-build/issues/2295 -->
15.7.179
15.7.179
- 0.1.0-6.0.100-bootstrap.6
+ 0.1.0-6.0.100-bootstrap.11
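Note: the bare version strings in this hunk are values of MSBuild property elements in eng/Versions.props. As a hedged sketch of the shape only, with a hypothetical property name (only the value change from bootstrap.6 to bootstrap.11 comes from this diff):

    <PropertyGroup>
      <!-- hypothetical element name; the diff only bumps the value to bootstrap.11 -->
      <SourceBuildBootstrapVersion>0.1.0-6.0.100-bootstrap.11</SourceBuildBootstrapVersion>
    </PropertyGroup>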
diff --git a/src/SourceBuild/tarball/content/Directory.Build.props b/src/SourceBuild/tarball/content/Directory.Build.props
index 2cd961ae5..c157ae5e2 100644
--- a/src/SourceBuild/tarball/content/Directory.Build.props
+++ b/src/SourceBuild/tarball/content/Directory.Build.props
@@ -137,6 +137,7 @@
$(GitInfoOfflineDir)$(RepositoryName).props
$(GitInfoOfflineDir)AllRepoVersions.props
$(BaseOutputPath)prebuilt-report/
+ $(PackageReportDir)prebuilt-packages/
$(PackageReportDir)prebuilt-usage.xml
$(PackageReportDir)poison-usage.xml
$(PackageReportDir)poison-catalog.xml
diff --git a/src/SourceBuild/tarball/content/TemporaryBootstrapPackageVersions.props b/src/SourceBuild/tarball/content/TemporaryBootstrapPackageVersions.props
new file mode 100755
index 000000000..235106b6b
--- /dev/null
+++ b/src/SourceBuild/tarball/content/TemporaryBootstrapPackageVersions.props
@@ -0,0 +1,21 @@
+
+
+
+
+ 3.1.0
+ 3.1.0
+ 6.0.0-preview.6.21352.12
+ 6.0.0-preview.6.21352.12
+ 5.0.0
+ 5.0.0
+ 6.0.0-preview.6.21352.12
+ 6.0.0-preview.6.21352.12
+
+
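Note: the new TemporaryBootstrapPackageVersions.props pins package versions as MSBuild properties. A minimal sketch of the shape, under assumptions: only MicrosoftNETCorePlatformsVersion is grounded in this diff (patch 0004 below references it), its pairing with a specific value is not shown here, and the remaining element names are unknown:

    <Project>
      <PropertyGroup>
        <!-- referenced by patch 0004; pairing with this particular value is an assumption -->
        <MicrosoftNETCorePlatformsVersion>3.1.0</MicrosoftNETCorePlatformsVersion>
        <!-- additional *Version properties pinned to 3.1.0, 5.0.0, and 6.0.0-preview.6.21352.12 -->
      </PropertyGroup>
    </Project>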
diff --git a/src/SourceBuild/tarball/content/eng/Versions.props b/src/SourceBuild/tarball/content/eng/Versions.props
index 227fd37d9..c03787c3d 100644
--- a/src/SourceBuild/tarball/content/eng/Versions.props
+++ b/src/SourceBuild/tarball/content/eng/Versions.props
@@ -21,6 +21,6 @@
- 0.1.0-6.0.100-bootstrap.6
+ 0.1.0-6.0.100-bootstrap.11
diff --git a/src/SourceBuild/tarball/content/patches/arcade/0001-Update-TFM-to-net6.0.patch b/src/SourceBuild/tarball/content/patches/arcade/0001-Update-TFM-to-net6.0.patch
new file mode 100644
index 000000000..f577d4158
--- /dev/null
+++ b/src/SourceBuild/tarball/content/patches/arcade/0001-Update-TFM-to-net6.0.patch
@@ -0,0 +1,122 @@
+From 5288390142a40beb392f5e11380b370ab696830c Mon Sep 17 00:00:00 2001
+From: dseefeld
+Date: Wed, 21 Jul 2021 21:10:15 +0000
+Subject: [PATCH] Update TFM to net6.0
+
+When building from source, only the current TFM is built. Update
+TargetFrameworks to only have net6.0.
+---
+ eng/TargetFrameworkDefaults.props | 2 +-
+ .../Microsoft.DotNet.Deployment.Tasks.Links.csproj | 2 +-
+ .../tasks/Microsoft.DotNet.NuGetRepack.Tasks.csproj | 2 +-
+ .../Microsoft.DotNet.GenFacades.csproj | 1 +
+ .../Microsoft.DotNet.PackageTesting.csproj | 1 +
+ .../Microsoft.DotNet.SharedFramework.Sdk.csproj | 1 +
+ src/Microsoft.DotNet.SignTool/Microsoft.DotNet.SignTool.csproj | 2 +-
+ .../tasks/Microsoft.DotNet.VersionTools.Tasks.csproj | 2 +-
+ 8 files changed, 8 insertions(+), 5 deletions(-)
+
+diff --git a/eng/TargetFrameworkDefaults.props b/eng/TargetFrameworkDefaults.props
+index 89f2a8eb..ca3546e8 100644
+--- a/eng/TargetFrameworkDefaults.props
++++ b/eng/TargetFrameworkDefaults.props
+@@ -6,7 +6,7 @@
+ -->
+
+ netcoreapp3.1
+- net5.0
++ net6.0
+
+
+
+diff --git a/src/Microsoft.DotNet.Deployment.Tasks.Links/Microsoft.DotNet.Deployment.Tasks.Links.csproj b/src/Microsoft.DotNet.Deployment.Tasks.Links/Microsoft.DotNet.Deployment.Tasks.Links.csproj
+index 30474e21..c964fbea 100644
+--- a/src/Microsoft.DotNet.Deployment.Tasks.Links/Microsoft.DotNet.Deployment.Tasks.Links.csproj
++++ b/src/Microsoft.DotNet.Deployment.Tasks.Links/Microsoft.DotNet.Deployment.Tasks.Links.csproj
+@@ -3,7 +3,7 @@
+
+
+ netcoreapp3.1;net472
+- netcoreapp3.1
++ net6.0
+
+ true
+ Aka.ms link manager
+diff --git a/src/Microsoft.DotNet.NuGetRepack/tasks/Microsoft.DotNet.NuGetRepack.Tasks.csproj b/src/Microsoft.DotNet.NuGetRepack/tasks/Microsoft.DotNet.NuGetRepack.Tasks.csproj
+index 6a1cee07..e01890c8 100644
+--- a/src/Microsoft.DotNet.NuGetRepack/tasks/Microsoft.DotNet.NuGetRepack.Tasks.csproj
++++ b/src/Microsoft.DotNet.NuGetRepack/tasks/Microsoft.DotNet.NuGetRepack.Tasks.csproj
+@@ -2,7 +2,7 @@
+
+
+ net472;netcoreapp3.1
+- netcoreapp3.1
++ net6.0
+
+ true
+ MSBuildSdk
+diff --git a/src/Microsoft.DotNet.GenFacades/Microsoft.DotNet.GenFacades.csproj b/src/Microsoft.DotNet.GenFacades/Microsoft.DotNet.GenFacades.csproj
+index db51ae17..bca9958d 100644
+--- a/src/Microsoft.DotNet.GenFacades/Microsoft.DotNet.GenFacades.csproj
++++ b/src/Microsoft.DotNet.GenFacades/Microsoft.DotNet.GenFacades.csproj
+@@ -2,6 +2,7 @@
+
+
+ $(TargetFrameworkForNETSDK);net472
++ $(TargetFrameworkForNETSDK)
+ MSBuildSdk
+ false
+ true
+diff --git a/src/Microsoft.DotNet.PackageTesting/Microsoft.DotNet.PackageTesting.csproj b/src/Microsoft.DotNet.PackageTesting/Microsoft.DotNet.PackageTesting.csproj
+index 2f35e4aa..dfe69f4c 100644
+--- a/src/Microsoft.DotNet.PackageTesting/Microsoft.DotNet.PackageTesting.csproj
++++ b/src/Microsoft.DotNet.PackageTesting/Microsoft.DotNet.PackageTesting.csproj
+@@ -2,6 +2,7 @@
+
+
+ netcoreapp3.1;net472
++ net6.0
+ false
+ MSBuildSdk
+ false
+diff --git a/src/Microsoft.DotNet.SharedFramework.Sdk/Microsoft.DotNet.SharedFramework.Sdk.csproj b/src/Microsoft.DotNet.SharedFramework.Sdk/Microsoft.DotNet.SharedFramework.Sdk.csproj
+index 4405a1fe..a93edfaa 100644
+--- a/src/Microsoft.DotNet.SharedFramework.Sdk/Microsoft.DotNet.SharedFramework.Sdk.csproj
++++ b/src/Microsoft.DotNet.SharedFramework.Sdk/Microsoft.DotNet.SharedFramework.Sdk.csproj
+@@ -2,6 +2,7 @@
+
+
+ net472;netcoreapp3.1
++ net6.0
+ preview
+ false
+
+diff --git a/src/Microsoft.DotNet.SignTool/Microsoft.DotNet.SignTool.csproj b/src/Microsoft.DotNet.SignTool/Microsoft.DotNet.SignTool.csproj
+index 8ec571ae..7a89dfe9 100644
+--- a/src/Microsoft.DotNet.SignTool/Microsoft.DotNet.SignTool.csproj
++++ b/src/Microsoft.DotNet.SignTool/Microsoft.DotNet.SignTool.csproj
+@@ -2,7 +2,7 @@
+
+
+ net472;netcoreapp3.1
+- netcoreapp3.1
++ net6.0
+ true
+ Latest
+ true
+diff --git a/src/Microsoft.DotNet.VersionTools/tasks/Microsoft.DotNet.VersionTools.Tasks.csproj b/src/Microsoft.DotNet.VersionTools/tasks/Microsoft.DotNet.VersionTools.Tasks.csproj
+index 208ffb03..2dfa124f 100644
+--- a/src/Microsoft.DotNet.VersionTools/tasks/Microsoft.DotNet.VersionTools.Tasks.csproj
++++ b/src/Microsoft.DotNet.VersionTools/tasks/Microsoft.DotNet.VersionTools.Tasks.csproj
+@@ -2,7 +2,7 @@
+
+
+ net472;netcoreapp3.1
+- netcoreapp3.1
++ net6.0
+ MSBuildSdk
+
+
+--
+2.31.1
+
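Note: each changed line in the patch above is the value of a TargetFramework(s) property in the corresponding project file, pinned to net6.0 so that source-build builds only the current TFM. A minimal sketch of the intended shape; the Condition shown is an assumption, not taken from this diff:

    <PropertyGroup>
      <TargetFrameworks>netcoreapp3.1;net472</TargetFrameworks>
      <!-- assumed shape: pin to net6.0 when building from source -->
      <TargetFrameworks Condition="'$(DotNetBuildFromSource)' == 'true'">net6.0</TargetFrameworks>
    </PropertyGroup>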
diff --git a/src/SourceBuild/tarball/content/patches/arcade/0002-Exclude-test-projects-from-source-build.patch b/src/SourceBuild/tarball/content/patches/arcade/0002-Exclude-test-projects-from-source-build.patch
new file mode 100644
index 000000000..67bbb7be1
--- /dev/null
+++ b/src/SourceBuild/tarball/content/patches/arcade/0002-Exclude-test-projects-from-source-build.patch
@@ -0,0 +1,37 @@
+From 9531b8ea8fab44bf8b9b19c64c393e0d2d5907c4 Mon Sep 17 00:00:00 2001
+From: dseefeld
+Date: Wed, 21 Jul 2021 22:07:46 +0000
+Subject: [PATCH 1/2] Exclude test projects from source-build
+
+---
+ .../Microsoft.Arcade.Common.Tests.csproj | 1 +
+ .../Microsoft.Arcade.Test.Common.csproj | 1 +
+ 2 files changed, 2 insertions(+)
+
+diff --git a/src/Common/Microsoft.Arcade.Common.Tests/Microsoft.Arcade.Common.Tests.csproj b/src/Common/Microsoft.Arcade.Common.Tests/Microsoft.Arcade.Common.Tests.csproj
+index 653588d2..98b20958 100644
+--- a/src/Common/Microsoft.Arcade.Common.Tests/Microsoft.Arcade.Common.Tests.csproj
++++ b/src/Common/Microsoft.Arcade.Common.Tests/Microsoft.Arcade.Common.Tests.csproj
+@@ -3,6 +3,7 @@
+
+ netcoreapp3.1
+ enable
++ true
+
+
+
+diff --git a/src/Common/Microsoft.Arcade.Test.Common/Microsoft.Arcade.Test.Common.csproj b/src/Common/Microsoft.Arcade.Test.Common/Microsoft.Arcade.Test.Common.csproj
+index bb3c5eeb..bcc3d717 100644
+--- a/src/Common/Microsoft.Arcade.Test.Common/Microsoft.Arcade.Test.Common.csproj
++++ b/src/Common/Microsoft.Arcade.Test.Common/Microsoft.Arcade.Test.Common.csproj
+@@ -3,6 +3,7 @@
+
+ netcoreapp3.1;net472
+ true
++ true
+
+
+
+--
+2.31.1
+
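Note: the `true` values added above belong to a property element whose name is inferred from the patch title. A hedged sketch of the change to each test project, assuming the element is ExcludeFromSourceBuild:

    <PropertyGroup>
      <TargetFramework>netcoreapp3.1</TargetFramework>
      <!-- inferred from the patch title: exclude the project from source-build -->
      <ExcludeFromSourceBuild>true</ExcludeFromSourceBuild>
    </PropertyGroup>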
diff --git a/src/SourceBuild/tarball/content/patches/arcade/0003-Remove-net472-TFM.patch b/src/SourceBuild/tarball/content/patches/arcade/0003-Remove-net472-TFM.patch
new file mode 100644
index 000000000..e34352c51
--- /dev/null
+++ b/src/SourceBuild/tarball/content/patches/arcade/0003-Remove-net472-TFM.patch
@@ -0,0 +1,24 @@
+From 63ab09a985b91f4b30a58dc113abc65d34298a12 Mon Sep 17 00:00:00 2001
+From: dseefeld
+Date: Wed, 21 Jul 2021 22:09:10 +0000
+Subject: [PATCH 2/2] Remove net472 TFM
+
+---
+ .../Microsoft.Arcade.Common/Microsoft.Arcade.Common.csproj | 1 +
+ 1 file changed, 1 insertion(+)
+
+diff --git a/src/Common/Microsoft.Arcade.Common/Microsoft.Arcade.Common.csproj b/src/Common/Microsoft.Arcade.Common/Microsoft.Arcade.Common.csproj
+index 324725f5..ac04f517 100644
+--- a/src/Common/Microsoft.Arcade.Common/Microsoft.Arcade.Common.csproj
++++ b/src/Common/Microsoft.Arcade.Common/Microsoft.Arcade.Common.csproj
+@@ -2,6 +2,7 @@
+
+
+ net472;netstandard2.0
++ netstandard2.0
+ true
+
+
+--
+2.31.1
+
diff --git a/src/SourceBuild/tarball/content/patches/arcade/0004-Use-property-instead-of-hardcoded-version.patch b/src/SourceBuild/tarball/content/patches/arcade/0004-Use-property-instead-of-hardcoded-version.patch
new file mode 100644
index 000000000..aea25220b
--- /dev/null
+++ b/src/SourceBuild/tarball/content/patches/arcade/0004-Use-property-instead-of-hardcoded-version.patch
@@ -0,0 +1,25 @@
+From 05673a6eb5004131cd42eda6d372e94c1dfb3165 Mon Sep 17 00:00:00 2001
+From: dseefeld
+Date: Wed, 21 Jul 2021 22:25:51 +0000
+Subject: [PATCH] Use property instead of hardcoded version
+
+---
+ .../src/Microsoft.DotNet.Build.Tasks.Packaging.csproj | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/src/Microsoft.DotNet.Build.Tasks.Packaging/src/Microsoft.DotNet.Build.Tasks.Packaging.csproj b/src/Microsoft.DotNet.Build.Tasks.Packaging/src/Microsoft.DotNet.Build.Tasks.Packaging.csproj
+index 31562d59..200fd507 100644
+--- a/src/Microsoft.DotNet.Build.Tasks.Packaging/src/Microsoft.DotNet.Build.Tasks.Packaging.csproj
++++ b/src/Microsoft.DotNet.Build.Tasks.Packaging/src/Microsoft.DotNet.Build.Tasks.Packaging.csproj
+@@ -89,7 +89,7 @@
+
+
+ <_candidatePackageFolder>%(_candidatPackageFolders.Identity)</_candidatePackageFolder>
+- <_runtimeJsonSubPath>Microsoft.NETCore.Platforms\2.1.0\runtime.json</_runtimeJsonSubPath>
++ <_runtimeJsonSubPath>Microsoft.NETCore.Platforms\$(MicrosoftNETCorePlatformsVersion)\runtime.json</_runtimeJsonSubPath>
+ <_runtimeJsonPath Condition="'$(_runtimeJsonPath)' == '' AND Exists('$(_candidatePackageFolder)\$(_runtimeJsonSubPath)')">$(_candidatePackageFolder)\$(_runtimeJsonSubPath)</_runtimeJsonPath>
+ <_runtimeJsonPath Condition="'$(_runtimeJsonPath)' == '' AND Exists('$(_candidatePackageFolder)\$(_runtimeJsonSubPath.ToLower())')">$(_candidatePackageFolder)\$(_runtimeJsonSubPath.ToLower())</_runtimeJsonPath>
+
+--
+2.31.1
+
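Note: with the hardcoded 2.1.0 replaced by $(MicrosoftNETCorePlatformsVersion), the runtime.json sub-path follows whatever version the repo pins. Assuming, for illustration only, a value of 3.1.0 (one of the versions listed in TemporaryBootstrapPackageVersions.props above):

    <PropertyGroup>
      <!-- assumed value for illustration; plausibly supplied by the bootstrap props file added earlier -->
      <MicrosoftNETCorePlatformsVersion>3.1.0</MicrosoftNETCorePlatformsVersion>
      <!-- _runtimeJsonSubPath then resolves to Microsoft.NETCore.Platforms\3.1.0\runtime.json -->
    </PropertyGroup>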
diff --git a/src/SourceBuild/tarball/content/patches/source-build-reference-packages/0001-Update-common.patch b/src/SourceBuild/tarball/content/patches/source-build-reference-packages/0001-Update-common.patch
new file mode 100644
index 000000000..5b629c610
--- /dev/null
+++ b/src/SourceBuild/tarball/content/patches/source-build-reference-packages/0001-Update-common.patch
@@ -0,0 +1,6374 @@
+From 8215b2687e8619f0556583f020634b038b0e62ea Mon Sep 17 00:00:00 2001
+From: dseefeld
+Date: Mon, 26 Jul 2021 21:30:11 +0000
+Subject: [PATCH] Update common
+
+Update the eng/common directory to the current versions that work with
+the 6.0 version of arcade. This is needed until arcade is updated in
+the SBRP (source-build-reference-packages) repo.
+See https://github.com/dotnet/source-build-reference-packages/pull/219
+---
+ eng/common/SetupNugetSources.ps1 | 5 +-
+ eng/common/SetupNugetSources.sh | 4 +-
+ eng/common/build.ps1 | 4 +-
+ eng/common/build.sh | 11 +-
+ eng/common/cross/arm64/tizen-build-rootfs.sh | 0
+ eng/common/cross/arm64/tizen-fetch.sh | 2 +-
+ eng/common/cross/armel/armel.jessie.patch | 43 +
+ eng/common/cross/armel/tizen-build-rootfs.sh | 0
+ eng/common/cross/armel/tizen-fetch.sh | 2 +-
+ eng/common/cross/build-android-rootfs.sh | 2 +-
+ eng/common/cross/build-rootfs.sh | 56 +-
+ eng/common/cross/s390x/sources.list.bionic | 11 +
+ eng/common/cross/toolchain.cmake | 11 +-
+ eng/common/darc-init.sh | 2 +-
+ .../dotnet-install-scripts/dotnet-install.ps1 | 774 -----------
+ .../dotnet-install-scripts/dotnet-install.sh | 1133 -----------------
+ eng/common/dotnet-install.sh | 13 +-
+ eng/common/generate-locproject.ps1 | 117 ++
+ eng/common/init-tools-native.sh | 113 +-
+ eng/common/internal-feed-operations.ps1 | 8 +-
+ eng/common/internal-feed-operations.sh | 6 +-
+ eng/common/msbuild.ps1 | 1 +
+ eng/common/msbuild.sh | 2 +-
+ eng/common/native/CommonLibrary.psm1 | 2 +-
+ eng/common/native/common-library.sh | 0
+ eng/common/native/install-cmake-test.sh | 6 +-
+ eng/common/native/install-cmake.sh | 6 +-
+ eng/common/native/install-tool.ps1 | 2 +-
+ eng/common/performance/blazor_perf.proj | 30 -
+ eng/common/performance/crossgen_perf.proj | 69 -
+ eng/common/performance/microbenchmarks.proj | 144 ---
+ eng/common/performance/performance-setup.ps1 | 147 ---
+ eng/common/performance/performance-setup.sh | 289 -----
+ eng/common/pipeline-logging-functions.ps1 | 104 +-
+ eng/common/pipeline-logging-functions.sh | 34 +-
+ eng/common/post-build/post-build-utils.ps1 | 6 +-
+ eng/common/post-build/publish-using-darc.ps1 | 14 +-
+ .../post-build/sourcelink-validation.ps1 | 95 +-
+ eng/common/post-build/symbols-validation.ps1 | 182 ++-
+ eng/common/sdk-task.ps1 | 8 +-
+ eng/common/sdl/execute-all-sdl-tools.ps1 | 6 +-
+ eng/common/sdl/init-sdl.ps1 | 16 +-
+ eng/common/sdl/packages.config | 2 +-
+ eng/common/sdl/push-gdn.ps1 | 69 -
+ eng/common/sdl/run-sdl.ps1 | 2 +-
+ eng/common/templates/job/execute-sdl.yml | 4 +-
+ eng/common/templates/job/job.yml | 30 +-
+ eng/common/templates/job/onelocbuild.yml | 93 ++
+ eng/common/templates/job/performance.yml | 95 --
+ .../templates/job/publish-build-assets.yml | 8 +
+ eng/common/templates/job/source-build.yml | 11 +
+ .../templates/job/source-index-stage1.yml | 62 +
+ eng/common/templates/jobs/jobs.yml | 29 +-
+ eng/common/templates/jobs/source-build.yml | 12 +-
+ .../templates/phases/publish-build-assets.yml | 1 +
+ .../channels/generic-internal-channel.yml | 8 +
+ .../channels/generic-public-channel.yml | 8 +
+ .../templates/post-build/post-build.yml | 402 +++---
+ .../post-build/setup-maestro-vars.yml | 1 +
+ .../templates/steps/perf-send-to-helix.yml | 50 -
+ eng/common/templates/steps/send-to-helix.yml | 44 +-
+ eng/common/templates/steps/source-build.yml | 7 +-
+ eng/common/tools.ps1 | 166 ++-
+ eng/common/tools.sh | 106 +-
+ 64 files changed, 1272 insertions(+), 3418 deletions(-)
+ mode change 100644 => 100755 eng/common/SetupNugetSources.sh
+ mode change 100644 => 100755 eng/common/cross/arm64/tizen-build-rootfs.sh
+ mode change 100644 => 100755 eng/common/cross/arm64/tizen-fetch.sh
+ create mode 100644 eng/common/cross/armel/armel.jessie.patch
+ mode change 100644 => 100755 eng/common/cross/armel/tizen-build-rootfs.sh
+ mode change 100644 => 100755 eng/common/cross/armel/tizen-fetch.sh
+ mode change 100644 => 100755 eng/common/cross/build-android-rootfs.sh
+ mode change 100644 => 100755 eng/common/cross/build-rootfs.sh
+ create mode 100644 eng/common/cross/s390x/sources.list.bionic
+ delete mode 100644 eng/common/dotnet-install-scripts/dotnet-install.ps1
+ delete mode 100644 eng/common/dotnet-install-scripts/dotnet-install.sh
+ create mode 100644 eng/common/generate-locproject.ps1
+ mode change 100644 => 100755 eng/common/init-tools-native.sh
+ mode change 100644 => 100755 eng/common/native/common-library.sh
+ mode change 100644 => 100755 eng/common/native/install-cmake.sh
+ delete mode 100644 eng/common/performance/blazor_perf.proj
+ delete mode 100644 eng/common/performance/crossgen_perf.proj
+ delete mode 100644 eng/common/performance/microbenchmarks.proj
+ delete mode 100644 eng/common/performance/performance-setup.ps1
+ delete mode 100755 eng/common/performance/performance-setup.sh
+ delete mode 100644 eng/common/sdl/push-gdn.ps1
+ create mode 100644 eng/common/templates/job/onelocbuild.yml
+ delete mode 100644 eng/common/templates/job/performance.yml
+ create mode 100644 eng/common/templates/job/source-index-stage1.yml
+ delete mode 100644 eng/common/templates/steps/perf-send-to-helix.yml
+ mode change 100644 => 100755 eng/common/tools.sh
+
+diff --git a/eng/common/SetupNugetSources.ps1 b/eng/common/SetupNugetSources.ps1
+index bb36171..a0b5fc3 100644
+--- a/eng/common/SetupNugetSources.ps1
++++ b/eng/common/SetupNugetSources.ps1
+@@ -99,8 +99,9 @@ function InsertMaestroPrivateFeedCredentials($Sources, $Creds, $Username, $Passw
+ function EnablePrivatePackageSources($DisabledPackageSources) {
+ $maestroPrivateSources = $DisabledPackageSources.SelectNodes("add[contains(@key,'darc-int')]")
+ ForEach ($DisabledPackageSource in $maestroPrivateSources) {
+- Write-Host "`tEnsuring private source '$($DisabledPackageSource.key)' is enabled"
+- $DisabledPackageSource.SetAttribute("value", "false")
++ Write-Host "`tEnsuring private source '$($DisabledPackageSource.key)' is enabled by deleting it from disabledPackageSource"
++ # Due to https://github.com/NuGet/Home/issues/10291, we must actually remove the disabled entries
++ $DisabledPackageSources.RemoveChild($DisabledPackageSource)
+ }
+ }
+
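Note on the hunk above: NuGet tracks disabled feeds in a disabledPackageSources section of NuGet.config, and per https://github.com/NuGet/Home/issues/10291 an entry must be removed outright rather than set to false. A sketch of such a section; the darc-int-example key is hypothetical:

    <disabledPackageSources>
      <!-- hypothetical entry; the script now deletes nodes like this instead of flipping value to "false" -->
      <add key="darc-int-example" value="true" />
    </disabledPackageSources>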
+diff --git a/eng/common/SetupNugetSources.sh b/eng/common/SetupNugetSources.sh
+old mode 100644
+new mode 100755
+index ef33382..2734601
+--- a/eng/common/SetupNugetSources.sh
++++ b/eng/common/SetupNugetSources.sh
+@@ -158,8 +158,8 @@ if [ "$?" == "0" ]; then
+ for DisabledSourceName in ${DisabledDarcIntSources[@]} ; do
+ if [[ $DisabledSourceName == darc-int* ]]
+ then
+- OldDisableValue="add key=\"$DisabledSourceName\" value=\"true\""
+- NewDisableValue="add key=\"$DisabledSourceName\" value=\"false\""
++ OldDisableValue=""
++ NewDisableValue=""
+ sed -i.bak "s|$OldDisableValue|$NewDisableValue|" $ConfigFile
+ echo "Neutralized disablePackageSources entry for '$DisabledSourceName'"
+ fi
+diff --git a/eng/common/build.ps1 b/eng/common/build.ps1
+index 1fd7f68..8943da2 100644
+--- a/eng/common/build.ps1
++++ b/eng/common/build.ps1
+@@ -7,7 +7,6 @@ Param(
+ [string] $msbuildEngine = $null,
+ [bool] $warnAsError = $true,
+ [bool] $nodeReuse = $true,
+- [bool] $useDefaultDotnetInstall = $false,
+ [switch][Alias('r')]$restore,
+ [switch] $deployDeps,
+ [switch][Alias('b')]$build,
+@@ -26,6 +25,7 @@ Param(
+ [switch] $prepareMachine,
+ [string] $runtimeSourceFeed = '',
+ [string] $runtimeSourceFeedKey = '',
++ [switch] $excludePrereleaseVS,
+ [switch] $help,
+ [Parameter(ValueFromRemainingArguments=$true)][String[]]$properties
+ )
+@@ -66,7 +66,7 @@ function Print-Usage() {
+ Write-Host " -prepareMachine Prepare machine for CI run, clean up processes after build"
+ Write-Host " -warnAsError Sets warnaserror msbuild parameter ('true' or 'false')"
+ Write-Host " -msbuildEngine Msbuild engine to use to run build ('dotnet', 'vs', or unspecified)."
+- Write-Host " -useDefaultDotnetInstall Use dotnet-install.* scripts from public location as opposed to from eng common folder"
++ Write-Host " -excludePrereleaseVS Set to exclude build engines in prerelease versions of Visual Studio"
+ Write-Host ""
+
+ Write-Host "Command line arguments not listed above are passed thru to msbuild."
+diff --git a/eng/common/build.sh b/eng/common/build.sh
+index 19849ad..55b298f 100755
+--- a/eng/common/build.sh
++++ b/eng/common/build.sh
+@@ -36,8 +36,6 @@ usage()
+ echo " --prepareMachine Prepare machine for CI run, clean up processes after build"
+ echo " --nodeReuse Sets nodereuse msbuild parameter ('true' or 'false')"
+ echo " --warnAsError Sets warnaserror msbuild parameter ('true' or 'false')"
+- echo " --useDefaultDotnetInstall Use dotnet-install.* scripts from public location as opposed to from eng common folder"
+-
+ echo ""
+ echo "Command line arguments not listed above are passed thru to msbuild."
+ echo "Arguments can also be passed in with a single hyphen."
+@@ -80,11 +78,10 @@ prepare_machine=false
+ verbosity='minimal'
+ runtime_source_feed=''
+ runtime_source_feed_key=''
+-use_default_dotnet_install=false
+
+ properties=''
+ while [[ $# > 0 ]]; do
+- opt="$(echo "${1/#--/-}" | awk '{print tolower($0)}')"
++ opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")"
+ case "$opt" in
+ -help|-h)
+ usage
+@@ -159,14 +156,10 @@ while [[ $# > 0 ]]; do
+ runtime_source_feed=$2
+ shift
+ ;;
+- -runtimesourcefeedkey)
++ -runtimesourcefeedkey)
+ runtime_source_feed_key=$2
+ shift
+ ;;
+- -usedefaultdotnetinstall)
+- use_default_dotnet_install=$2
+- shift
+- ;;
+ *)
+ properties="$properties $1"
+ ;;
+diff --git a/eng/common/cross/arm64/tizen-build-rootfs.sh b/eng/common/cross/arm64/tizen-build-rootfs.sh
+old mode 100644
+new mode 100755
+diff --git a/eng/common/cross/arm64/tizen-fetch.sh b/eng/common/cross/arm64/tizen-fetch.sh
+old mode 100644
+new mode 100755
+index a48a6f5..16d1301
+--- a/eng/common/cross/arm64/tizen-fetch.sh
++++ b/eng/common/cross/arm64/tizen-fetch.sh
+@@ -157,7 +157,7 @@ fetch_tizen_pkgs()
+ Inform "Initialize arm base"
+ fetch_tizen_pkgs_init standard base
+ Inform "fetch common packages"
+-fetch_tizen_pkgs aarch64 gcc glibc glibc-devel libicu libicu-devel libatomic linux-glibc-devel
++fetch_tizen_pkgs aarch64 gcc glibc glibc-devel libicu libicu-devel libatomic linux-glibc-devel keyutils keyutils-devel libkeyutils
+ Inform "fetch coreclr packages"
+ fetch_tizen_pkgs aarch64 lldb lldb-devel libgcc libstdc++ libstdc++-devel libunwind libunwind-devel lttng-ust-devel lttng-ust userspace-rcu-devel userspace-rcu
+ Inform "fetch corefx packages"
+diff --git a/eng/common/cross/armel/armel.jessie.patch b/eng/common/cross/armel/armel.jessie.patch
+new file mode 100644
+index 0000000..2d26156
+--- /dev/null
++++ b/eng/common/cross/armel/armel.jessie.patch
+@@ -0,0 +1,43 @@
++diff -u -r a/usr/include/urcu/uatomic/generic.h b/usr/include/urcu/uatomic/generic.h
++--- a/usr/include/urcu/uatomic/generic.h 2014-10-22 15:00:58.000000000 -0700
+++++ b/usr/include/urcu/uatomic/generic.h 2020-10-30 21:38:28.550000000 -0700
++@@ -69,10 +69,10 @@
++ #endif
++ #ifdef UATOMIC_HAS_ATOMIC_SHORT
++ case 2:
++- return __sync_val_compare_and_swap_2(addr, old, _new);
+++ return __sync_val_compare_and_swap_2((uint16_t*) addr, old, _new);
++ #endif
++ case 4:
++- return __sync_val_compare_and_swap_4(addr, old, _new);
+++ return __sync_val_compare_and_swap_4((uint32_t*) addr, old, _new);
++ #if (CAA_BITS_PER_LONG == 64)
++ case 8:
++ return __sync_val_compare_and_swap_8(addr, old, _new);
++@@ -109,7 +109,7 @@
++ return;
++ #endif
++ case 4:
++- __sync_and_and_fetch_4(addr, val);
+++ __sync_and_and_fetch_4((uint32_t*) addr, val);
++ return;
++ #if (CAA_BITS_PER_LONG == 64)
++ case 8:
++@@ -148,7 +148,7 @@
++ return;
++ #endif
++ case 4:
++- __sync_or_and_fetch_4(addr, val);
+++ __sync_or_and_fetch_4((uint32_t*) addr, val);
++ return;
++ #if (CAA_BITS_PER_LONG == 64)
++ case 8:
++@@ -187,7 +187,7 @@
++ return __sync_add_and_fetch_2(addr, val);
++ #endif
++ case 4:
++- return __sync_add_and_fetch_4(addr, val);
+++ return __sync_add_and_fetch_4((uint32_t*) addr, val);
++ #if (CAA_BITS_PER_LONG == 64)
++ case 8:
++ return __sync_add_and_fetch_8(addr, val);
+diff --git a/eng/common/cross/armel/tizen-build-rootfs.sh b/eng/common/cross/armel/tizen-build-rootfs.sh
+old mode 100644
+new mode 100755
+diff --git a/eng/common/cross/armel/tizen-fetch.sh b/eng/common/cross/armel/tizen-fetch.sh
+old mode 100644
+new mode 100755
+index 2776cbb..64f0187
+--- a/eng/common/cross/armel/tizen-fetch.sh
++++ b/eng/common/cross/armel/tizen-fetch.sh
+@@ -157,7 +157,7 @@ fetch_tizen_pkgs()
+ Inform "Initialize arm base"
+ fetch_tizen_pkgs_init standard base
+ Inform "fetch common packages"
+-fetch_tizen_pkgs armv7l gcc glibc glibc-devel libicu libicu-devel libatomic linux-glibc-devel
++fetch_tizen_pkgs armv7l gcc gcc-devel-static glibc glibc-devel libicu libicu-devel libatomic linux-glibc-devel keyutils keyutils-devel libkeyutils
+ Inform "fetch coreclr packages"
+ fetch_tizen_pkgs armv7l lldb lldb-devel libgcc libstdc++ libstdc++-devel libunwind libunwind-devel lttng-ust-devel lttng-ust userspace-rcu-devel userspace-rcu
+ Inform "fetch corefx packages"
+diff --git a/eng/common/cross/build-android-rootfs.sh b/eng/common/cross/build-android-rootfs.sh
+old mode 100644
+new mode 100755
+index e7f12ed..42516bb
+--- a/eng/common/cross/build-android-rootfs.sh
++++ b/eng/common/cross/build-android-rootfs.sh
+@@ -27,7 +27,7 @@ __AndroidToolchain=aarch64-linux-android
+
+ for i in "$@"
+ do
+- lowerI="$(echo $i | awk '{print tolower($0)}')"
++ lowerI="$(echo $i | tr "[:upper:]" "[:lower:]")"
+ case $lowerI in
+ -?|-h|--help)
+ usage
+diff --git a/eng/common/cross/build-rootfs.sh b/eng/common/cross/build-rootfs.sh
+old mode 100644
+new mode 100755
+index ffdff38..591d866
+--- a/eng/common/cross/build-rootfs.sh
++++ b/eng/common/cross/build-rootfs.sh
+@@ -6,7 +6,7 @@ usage()
+ {
+ echo "Usage: $0 [BuildArch] [CodeName] [lldbx.y] [--skipunmount] --rootfsdir ]"
+ echo "BuildArch can be: arm(default), armel, arm64, x86"
+- echo "CodeName - optional, Code name for Linux, can be: trusty, xenial(default), zesty, bionic, alpine. If BuildArch is armel, LinuxCodeName is jessie(default) or tizen."
++ echo "CodeName - optional, Code name for Linux, can be: trusty, xenial(default), zesty, bionic, alpine, alpine3.9 or alpine3.13. If BuildArch is armel, LinuxCodeName is jessie(default) or tizen."
+ echo " for FreeBSD can be: freebsd11 or freebsd12."
+ echo " for illumos can be: illumos."
+ echo "lldbx.y - optional, LLDB version, can be: lldb3.9(default), lldb4.0, lldb5.0, lldb6.0 no-lldb. Ignored for alpine and FReeBSD"
+@@ -74,6 +74,10 @@ __IllumosPackages+=" mit-krb5-1.16.2nb4"
+ __IllumosPackages+=" openssl-1.1.1e"
+ __IllumosPackages+=" zlib-1.2.11"
+
++# ML.NET dependencies
++__UbuntuPackages+=" libomp5"
++__UbuntuPackages+=" libomp-dev"
++
+ __UseMirror=0
+
+ __UnprocessedBuildArgs=
+@@ -82,7 +86,7 @@ while :; do
+ break
+ fi
+
+- lowerI="$(echo $1 | awk '{print tolower($0)}')"
++ lowerI="$(echo $1 | tr "[:upper:]" "[:lower:]")"
+ case $lowerI in
+ -?|-h|--help)
+ usage
+@@ -106,6 +110,13 @@ while :; do
+ __UbuntuRepo="http://ftp.debian.org/debian/"
+ __CodeName=jessie
+ ;;
++ s390x)
++ __BuildArch=s390x
++ __UbuntuArch=s390x
++ __UbuntuRepo="http://ports.ubuntu.com/ubuntu-ports/"
++ __UbuntuPackages=$(echo ${__UbuntuPackages} | sed 's/ libunwind8-dev//')
++ unset __LLDB_Package
++ ;;
+ x86)
+ __BuildArch=x86
+ __UbuntuArch=i386
+@@ -176,9 +187,20 @@ while :; do
+ __UbuntuRepo=
+ __Tizen=tizen
+ ;;
+- alpine)
++ alpine|alpine3.9)
++ __CodeName=alpine
++ __UbuntuRepo=
++ __AlpineVersion=3.9
++ ;;
++ alpine3.13)
+ __CodeName=alpine
+ __UbuntuRepo=
++ __AlpineVersion=3.13
++ # Alpine 3.13 has all the packages we need in the 3.13 repository
++ __AlpinePackages+=$__AlpinePackagesEdgeCommunity
++ __AlpinePackagesEdgeCommunity=
++ __AlpinePackages+=$__AlpinePackagesEdgeMain
++ __AlpinePackagesEdgeMain=
+ ;;
+ freebsd11)
+ __FreeBSDBase="11.3-RELEASE"
+@@ -236,7 +258,6 @@ __RootfsDir="$( cd "$__RootfsDir" && pwd )"
+
+ if [[ "$__CodeName" == "alpine" ]]; then
+ __ApkToolsVersion=2.9.1
+- __AlpineVersion=3.9
+ __ApkToolsDir=$(mktemp -d)
+ wget https://github.com/alpinelinux/apk-tools/releases/download/v$__ApkToolsVersion/apk-tools-$__ApkToolsVersion-x86_64-linux.tar.gz -P $__ApkToolsDir
+ tar -xf $__ApkToolsDir/apk-tools-$__ApkToolsVersion-x86_64-linux.tar.gz -C $__ApkToolsDir
+@@ -249,15 +270,19 @@ if [[ "$__CodeName" == "alpine" ]]; then
+ -U --allow-untrusted --root $__RootfsDir --arch $__AlpineArch --initdb \
+ add $__AlpinePackages
+
+- $__ApkToolsDir/apk-tools-$__ApkToolsVersion/apk \
+- -X http://dl-cdn.alpinelinux.org/alpine/edge/main \
+- -U --allow-untrusted --root $__RootfsDir --arch $__AlpineArch --initdb \
+- add $__AlpinePackagesEdgeMain
++ if [[ -n "$__AlpinePackagesEdgeMain" ]]; then
++ $__ApkToolsDir/apk-tools-$__ApkToolsVersion/apk \
++ -X http://dl-cdn.alpinelinux.org/alpine/edge/main \
++ -U --allow-untrusted --root $__RootfsDir --arch $__AlpineArch --initdb \
++ add $__AlpinePackagesEdgeMain
++ fi
+
+- $__ApkToolsDir/apk-tools-$__ApkToolsVersion/apk \
+- -X http://dl-cdn.alpinelinux.org/alpine/edge/community \
+- -U --allow-untrusted --root $__RootfsDir --arch $__AlpineArch --initdb \
+- add $__AlpinePackagesEdgeCommunity
++ if [[ -n "$__AlpinePackagesEdgeCommunity" ]]; then
++ $__ApkToolsDir/apk-tools-$__ApkToolsVersion/apk \
++ -X http://dl-cdn.alpinelinux.org/alpine/edge/community \
++ -U --allow-untrusted --root $__RootfsDir --arch $__AlpineArch --initdb \
++ add $__AlpinePackagesEdgeCommunity
++ fi
+
+ rm -r $__ApkToolsDir
+ elif [[ "$__CodeName" == "freebsd" ]]; then
+@@ -329,6 +354,7 @@ elif [[ -n $__CodeName ]]; then
+ chroot $__RootfsDir apt-get -f -y install
+ chroot $__RootfsDir apt-get -y install $__UbuntuPackages
+ chroot $__RootfsDir symlinks -cr /usr
++ chroot $__RootfsDir apt-get clean
+
+ if [ $__SkipUnmount == 0 ]; then
+ umount $__RootfsDir/* || true
+@@ -340,6 +366,12 @@ elif [[ -n $__CodeName ]]; then
+ patch -p1 < $__CrossDir/$__BuildArch/trusty-lttng-2.4.patch
+ popd
+ fi
++
++ if [[ "$__BuildArch" == "armel" && "$__CodeName" == "jessie" ]]; then
++ pushd $__RootfsDir
++ patch -p1 < $__CrossDir/$__BuildArch/armel.jessie.patch
++ popd
++ fi
+ elif [[ "$__Tizen" == "tizen" ]]; then
+ ROOTFS_DIR=$__RootfsDir $__CrossDir/$__BuildArch/tizen-build-rootfs.sh
+ else
+diff --git a/eng/common/cross/s390x/sources.list.bionic b/eng/common/cross/s390x/sources.list.bionic
+new file mode 100644
+index 0000000..2109557
+--- /dev/null
++++ b/eng/common/cross/s390x/sources.list.bionic
+@@ -0,0 +1,11 @@
++deb http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
++deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
++
++deb http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
++deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
++
++deb http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
++deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
++
++deb http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
++deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
+diff --git a/eng/common/cross/toolchain.cmake b/eng/common/cross/toolchain.cmake
+index 137736c..fc11001 100644
+--- a/eng/common/cross/toolchain.cmake
++++ b/eng/common/cross/toolchain.cmake
+@@ -36,6 +36,9 @@ elseif(TARGET_ARCH_NAME STREQUAL "arm64")
+ if("$ENV{__DistroRid}" MATCHES "tizen.*")
+ set(TIZEN_TOOLCHAIN "aarch64-tizen-linux-gnu/9.2.0")
+ endif()
++elseif(TARGET_ARCH_NAME STREQUAL "s390x")
++ set(CMAKE_SYSTEM_PROCESSOR s390x)
++ set(TOOLCHAIN "s390x-linux-gnu")
+ elseif(TARGET_ARCH_NAME STREQUAL "x86")
+ set(CMAKE_SYSTEM_PROCESSOR i686)
+ set(TOOLCHAIN "i686-linux-gnu")
+@@ -46,7 +49,7 @@ elseif (ILLUMOS)
+ set(CMAKE_SYSTEM_PROCESSOR "x86_64")
+ set(TOOLCHAIN "x86_64-illumos")
+ else()
+- message(FATAL_ERROR "Arch is ${TARGET_ARCH_NAME}. Only armel, arm, arm64 and x86 are supported!")
++ message(FATAL_ERROR "Arch is ${TARGET_ARCH_NAME}. Only armel, arm, arm64, s390x and x86 are supported!")
+ endif()
+
+ if(DEFINED ENV{TOOLCHAIN})
+@@ -139,6 +142,10 @@ function(add_toolchain_linker_flag Flag)
+ set("CMAKE_SHARED_LINKER_FLAGS${CONFIG_SUFFIX}" "${CMAKE_SHARED_LINKER_FLAGS${CONFIG_SUFFIX}} ${Flag}" PARENT_SCOPE)
+ endfunction()
+
++if(CMAKE_SYSTEM_NAME STREQUAL "Linux")
++ add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/lib/${TOOLCHAIN}")
++ add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/usr/lib/${TOOLCHAIN}")
++endif()
+
+ if(TARGET_ARCH_NAME STREQUAL "armel")
+ if(DEFINED TIZEN_TOOLCHAIN) # For Tizen only
+@@ -167,7 +174,7 @@ endif()
+
+ # Specify compile options
+
+-if((TARGET_ARCH_NAME MATCHES "^(arm|armel|arm64)$" AND NOT "$ENV{__DistroRid}" MATCHES "android.*") OR ILLUMOS)
++if((TARGET_ARCH_NAME MATCHES "^(arm|armel|arm64|s390x)$" AND NOT "$ENV{__DistroRid}" MATCHES "android.*") OR ILLUMOS)
+ set(CMAKE_C_COMPILER_TARGET ${TOOLCHAIN})
+ set(CMAKE_CXX_COMPILER_TARGET ${TOOLCHAIN})
+ set(CMAKE_ASM_COMPILER_TARGET ${TOOLCHAIN})
+diff --git a/eng/common/darc-init.sh b/eng/common/darc-init.sh
+index d981d7b..39abdbe 100755
+--- a/eng/common/darc-init.sh
++++ b/eng/common/darc-init.sh
+@@ -6,7 +6,7 @@ versionEndpoint='https://maestro-prod.westus2.cloudapp.azure.com/api/assets/darc
+ verbosity='minimal'
+
+ while [[ $# > 0 ]]; do
+- opt="$(echo "$1" | awk '{print tolower($0)}')"
++ opt="$(echo "$1" | tr "[:upper:]" "[:lower:]")"
+ case "$opt" in
+ --darcversion)
+ darcVersion=$2
+diff --git a/eng/common/dotnet-install-scripts/dotnet-install.ps1 b/eng/common/dotnet-install-scripts/dotnet-install.ps1
+deleted file mode 100644
+index f63b533..0000000
+--- a/eng/common/dotnet-install-scripts/dotnet-install.ps1
++++ /dev/null
+@@ -1,774 +0,0 @@
+-#
+-# Copyright (c) .NET Foundation and contributors. All rights reserved.
+-# Licensed under the MIT license. See LICENSE file in the project root for full license information.
+-#
+-
+-# Copied from https://dot.net/v1/dotnet-install.ps1 on 8/26/2020
+-
+-<#
+-.SYNOPSIS
+- Installs dotnet cli
+-.DESCRIPTION
+- Installs dotnet cli. If dotnet installation already exists in the given directory
+- it will update it only if the requested version differs from the one already installed.
+-.PARAMETER Channel
+- Default: LTS
+- Download from the Channel specified. Possible values:
+- - Current - most current release
+- - LTS - most current supported release
+- - 2-part version in a format A.B - represents a specific release
+- examples: 2.0, 1.0
+- - Branch name
+- examples: release/2.0.0, Master
+- Note: The version parameter overrides the channel parameter.
+-.PARAMETER Version
+- Default: latest
+- Represents a build version on specific channel. Possible values:
+- - latest - most latest build on specific channel
+- - coherent - most latest coherent build on specific channel
+- coherent applies only to SDK downloads
+- - 3-part version in a format A.B.C - represents specific version of build
+- examples: 2.0.0-preview2-006120, 1.1.0
+-.PARAMETER InstallDir
+- Default: %LocalAppData%\Microsoft\dotnet
+- Path to where to install dotnet. Note that binaries will be placed directly in a given directory.
+-.PARAMETER Architecture
+- Default: <auto> - this value represents currently running OS architecture
+- Architecture of dotnet binaries to be installed.
+- Possible values are: <auto>, amd64, x64, x86, arm64, arm
+-.PARAMETER SharedRuntime
+- This parameter is obsolete and may be removed in a future version of this script.
+- The recommended alternative is '-Runtime dotnet'.
+- Installs just the shared runtime bits, not the entire SDK.
+-.PARAMETER Runtime
+- Installs just a shared runtime, not the entire SDK.
+- Possible values:
+- - dotnet - the Microsoft.NETCore.App shared runtime
+- - aspnetcore - the Microsoft.AspNetCore.App shared runtime
+- - windowsdesktop - the Microsoft.WindowsDesktop.App shared runtime
+-.PARAMETER DryRun
+- If set it will not perform installation but instead display what command line to use to consistently install
+- currently requested version of dotnet cli. In example if you specify version 'latest' it will display a link
+- with specific version so that this command can be used deterministicly in a build script.
+- It also displays binaries location if you prefer to install or download it yourself.
+-.PARAMETER NoPath
+- By default this script will set environment variable PATH for the current process to the binaries folder inside installation folder.
+- If set it will display binaries location but not set any environment variable.
+-.PARAMETER Verbose
+- Displays diagnostics information.
+-.PARAMETER AzureFeed
+- Default: https://dotnetcli.azureedge.net/dotnet
+- This parameter typically is not changed by the user.
+- It allows changing the URL for the Azure feed used by this installer.
+-.PARAMETER UncachedFeed
+- This parameter typically is not changed by the user.
+- It allows changing the URL for the Uncached feed used by this installer.
+-.PARAMETER FeedCredential
+- Used as a query string to append to the Azure feed.
+- It allows changing the URL to use non-public blob storage accounts.
+-.PARAMETER ProxyAddress
+- If set, the installer will use the proxy when making web requests
+-.PARAMETER ProxyUseDefaultCredentials
+- Default: false
+- Use default credentials, when using proxy address.
+-.PARAMETER ProxyBypassList
+- If set with ProxyAddress, will provide the list of comma separated urls that will bypass the proxy
+-.PARAMETER SkipNonVersionedFiles
+- Default: false
+- Skips installing non-versioned files if they already exist, such as dotnet.exe.
+-.PARAMETER NoCdn
+- Disable downloading from the Azure CDN, and use the uncached feed directly.
+-.PARAMETER JSonFile
+- Determines the SDK version from a user specified global.json file
+- Note: global.json must have a value for 'SDK:Version'
+-#>
+-[cmdletbinding()]
+-param(
+- [string]$Channel="LTS",
+- [string]$Version="Latest",
+- [string]$JSonFile,
+- [string]$InstallDir="",
+- [string]$Architecture="",
+- [ValidateSet("dotnet", "aspnetcore", "windowsdesktop", IgnoreCase = $false)]
+- [string]$Runtime,
+- [Obsolete("This parameter may be removed in a future version of this script. The recommended alternative is '-Runtime dotnet'.")]
+- [switch]$SharedRuntime,
+- [switch]$DryRun,
+- [switch]$NoPath,
+- [string]$AzureFeed="https://dotnetcli.azureedge.net/dotnet",
+- [string]$UncachedFeed="https://dotnetcli.blob.core.windows.net/dotnet",
+- [string]$FeedCredential,
+- [string]$ProxyAddress,
+- [switch]$ProxyUseDefaultCredentials,
+- [string[]]$ProxyBypassList=@(),
+- [switch]$SkipNonVersionedFiles,
+- [switch]$NoCdn
+-)
+-
+-Set-StrictMode -Version Latest
+-$ErrorActionPreference="Stop"
+-$ProgressPreference="SilentlyContinue"
+-
+-if ($NoCdn) {
+- $AzureFeed = $UncachedFeed
+-}
+-
+-$BinFolderRelativePath=""
+-
+-if ($SharedRuntime -and (-not $Runtime)) {
+- $Runtime = "dotnet"
+-}
+-
+-# example path with regex: shared/1.0.0-beta-12345/somepath
+-$VersionRegEx="/\d+\.\d+[^/]+/"
+-$OverrideNonVersionedFiles = !$SkipNonVersionedFiles
+-
+-function Say($str) {
+- try
+- {
+- Write-Host "dotnet-install: $str"
+- }
+- catch
+- {
+- # Some platforms cannot utilize Write-Host (Azure Functions, for instance). Fall back to Write-Output
+- Write-Output "dotnet-install: $str"
+- }
+-}
+-
+-function Say-Verbose($str) {
+- try
+- {
+- Write-Verbose "dotnet-install: $str"
+- }
+- catch
+- {
+- # Some platforms cannot utilize Write-Verbose (Azure Functions, for instance). Fall back to Write-Output
+- Write-Output "dotnet-install: $str"
+- }
+-}
+-
+-function Say-Invocation($Invocation) {
+- $command = $Invocation.MyCommand;
+- $args = (($Invocation.BoundParameters.Keys | foreach { "-$_ `"$($Invocation.BoundParameters[$_])`"" }) -join " ")
+- Say-Verbose "$command $args"
+-}
+-
+-function Invoke-With-Retry([ScriptBlock]$ScriptBlock, [int]$MaxAttempts = 3, [int]$SecondsBetweenAttempts = 1) {
+- $Attempts = 0
+-
+- while ($true) {
+- try {
+- return $ScriptBlock.Invoke()
+- }
+- catch {
+- $Attempts++
+- if ($Attempts -lt $MaxAttempts) {
+- Start-Sleep $SecondsBetweenAttempts
+- }
+- else {
+- throw
+- }
+- }
+- }
+-}
+-
+-function Get-Machine-Architecture() {
+- Say-Invocation $MyInvocation
+-
+- # On PS x86, PROCESSOR_ARCHITECTURE reports x86 even on x64 systems.
+- # To get the correct architecture, we need to use PROCESSOR_ARCHITEW6432.
+- # PS x64 doesn't define this, so we fall back to PROCESSOR_ARCHITECTURE.
+- # Possible values: amd64, x64, x86, arm64, arm
+-
+- if( $ENV:PROCESSOR_ARCHITEW6432 -ne $null )
+- {
+- return $ENV:PROCESSOR_ARCHITEW6432
+- }
+-
+- return $ENV:PROCESSOR_ARCHITECTURE
+-}
+-
+-function Get-CLIArchitecture-From-Architecture([string]$Architecture) {
+- Say-Invocation $MyInvocation
+-
+- switch ($Architecture.ToLower()) {
+- { $_ -eq "" } { return Get-CLIArchitecture-From-Architecture $(Get-Machine-Architecture) }
+- { ($_ -eq "amd64") -or ($_ -eq "x64") } { return "x64" }
+- { $_ -eq "x86" } { return "x86" }
+- { $_ -eq "arm" } { return "arm" }
+- { $_ -eq "arm64" } { return "arm64" }
+- default { throw "Architecture not supported. If you think this is a bug, report it at https://github.com/dotnet/sdk/issues" }
+- }
+-}
+-
+-# The version text returned from the feeds is a 1-line or 2-line string:
+-# For the SDK and the dotnet runtime (2 lines):
+-# Line 1: # commit_hash
+-# Line 2: # 4-part version
+-# For the aspnetcore runtime (1 line):
+-# Line 1: # 4-part version
+-function Get-Version-Info-From-Version-Text([string]$VersionText) {
+- Say-Invocation $MyInvocation
+-
+- $Data = -split $VersionText
+-
+- $VersionInfo = @{
+- CommitHash = $(if ($Data.Count -gt 1) { $Data[0] })
+- Version = $Data[-1] # last line is always the version number.
+- }
+- return $VersionInfo
+-}
+-
+-function Load-Assembly([string] $Assembly) {
+- try {
+- Add-Type -Assembly $Assembly | Out-Null
+- }
+- catch {
+- # On Nano Server, Powershell Core Edition is used. Add-Type is unable to resolve base class assemblies because they are not GAC'd.
+- # Loading the base class assemblies is not unnecessary as the types will automatically get resolved.
+- }
+-}
+-
+-function GetHTTPResponse([Uri] $Uri)
+-{
+- Invoke-With-Retry(
+- {
+-
+- $HttpClient = $null
+-
+- try {
+- # HttpClient is used vs Invoke-WebRequest in order to support Nano Server which doesn't support the Invoke-WebRequest cmdlet.
+- Load-Assembly -Assembly System.Net.Http
+-
+- if(-not $ProxyAddress) {
+- try {
+- # Despite no proxy being explicitly specified, we may still be behind a default proxy
+- $DefaultProxy = [System.Net.WebRequest]::DefaultWebProxy;
+- if($DefaultProxy -and (-not $DefaultProxy.IsBypassed($Uri))) {
+- $ProxyAddress = $DefaultProxy.GetProxy($Uri).OriginalString
+- $ProxyUseDefaultCredentials = $true
+- }
+- } catch {
+- # Eat the exception and move forward as the above code is an attempt
+- # at resolving the DefaultProxy that may not have been a problem.
+- $ProxyAddress = $null
+- Say-Verbose("Exception ignored: $_.Exception.Message - moving forward...")
+- }
+- }
+-
+- if($ProxyAddress) {
+- $HttpClientHandler = New-Object System.Net.Http.HttpClientHandler
+- $HttpClientHandler.Proxy = New-Object System.Net.WebProxy -Property @{
+- Address=$ProxyAddress;
+- UseDefaultCredentials=$ProxyUseDefaultCredentials;
+- BypassList = $ProxyBypassList;
+- }
+- $HttpClient = New-Object System.Net.Http.HttpClient -ArgumentList $HttpClientHandler
+- }
+- else {
+-
+- $HttpClient = New-Object System.Net.Http.HttpClient
+- }
+- # Default timeout for HttpClient is 100s. For a 50 MB download this assumes 500 KB/s average, any less will time out
+- # 20 minutes allows it to work over much slower connections.
+- $HttpClient.Timeout = New-TimeSpan -Minutes 20
+- $Response = $HttpClient.GetAsync("${Uri}${FeedCredential}").Result
+- if (($Response -eq $null) -or (-not ($Response.IsSuccessStatusCode))) {
+- # The feed credential is potentially sensitive info. Do not log FeedCredential to console output.
+- $ErrorMsg = "Failed to download $Uri."
+- if ($Response -ne $null) {
+- $ErrorMsg += " $Response"
+- }
+-
+- throw $ErrorMsg
+- }
+-
+- return $Response
+- }
+- finally {
+- if ($HttpClient -ne $null) {
+- $HttpClient.Dispose()
+- }
+- }
+- })
+-}
+-
+-function Get-Latest-Version-Info([string]$AzureFeed, [string]$Channel, [bool]$Coherent) {
+- Say-Invocation $MyInvocation
+-
+- $VersionFileUrl = $null
+- if ($Runtime -eq "dotnet") {
+- $VersionFileUrl = "$UncachedFeed/Runtime/$Channel/latest.version"
+- }
+- elseif ($Runtime -eq "aspnetcore") {
+- $VersionFileUrl = "$UncachedFeed/aspnetcore/Runtime/$Channel/latest.version"
+- }
+- # Currently, the WindowsDesktop runtime is manufactured with the .Net core runtime
+- elseif ($Runtime -eq "windowsdesktop") {
+- $VersionFileUrl = "$UncachedFeed/Runtime/$Channel/latest.version"
+- }
+- elseif (-not $Runtime) {
+- if ($Coherent) {
+- $VersionFileUrl = "$UncachedFeed/Sdk/$Channel/latest.coherent.version"
+- }
+- else {
+- $VersionFileUrl = "$UncachedFeed/Sdk/$Channel/latest.version"
+- }
+- }
+- else {
+- throw "Invalid value for `$Runtime"
+- }
+- try {
+- $Response = GetHTTPResponse -Uri $VersionFileUrl
+- }
+- catch {
+- throw "Could not resolve version information."
+- }
+- $StringContent = $Response.Content.ReadAsStringAsync().Result
+-
+- switch ($Response.Content.Headers.ContentType) {
+- { ($_ -eq "application/octet-stream") } { $VersionText = $StringContent }
+- { ($_ -eq "text/plain") } { $VersionText = $StringContent }
+- { ($_ -eq "text/plain; charset=UTF-8") } { $VersionText = $StringContent }
+- default { throw "``$Response.Content.Headers.ContentType`` is an unknown .version file content type." }
+- }
+-
+- $VersionInfo = Get-Version-Info-From-Version-Text $VersionText
+-
+- return $VersionInfo
+-}
+-
+-function Parse-Jsonfile-For-Version([string]$JSonFile) {
+- Say-Invocation $MyInvocation
+-
+- If (-Not (Test-Path $JSonFile)) {
+- throw "Unable to find '$JSonFile'"
+- }
+- try {
+- $JSonContent = Get-Content($JSonFile) -Raw | ConvertFrom-Json | Select-Object -expand "sdk" -ErrorAction SilentlyContinue
+- }
+- catch {
+- throw "Json file unreadable: '$JSonFile'"
+- }
+- if ($JSonContent) {
+- try {
+- $JSonContent.PSObject.Properties | ForEach-Object {
+- $PropertyName = $_.Name
+- if ($PropertyName -eq "version") {
+- $Version = $_.Value
+- Say-Verbose "Version = $Version"
+- }
+- }
+- }
+- catch {
+- throw "Unable to parse the SDK node in '$JSonFile'"
+- }
+- }
+- else {
+- throw "Unable to find the SDK node in '$JSonFile'"
+- }
+- If ($Version -eq $null) {
+- throw "Unable to find the SDK:version node in '$JSonFile'"
+- }
+- return $Version
+-}
+-
+-function Get-Specific-Version-From-Version([string]$AzureFeed, [string]$Channel, [string]$Version, [string]$JSonFile) {
+- Say-Invocation $MyInvocation
+-
+- if (-not $JSonFile) {
+- switch ($Version.ToLower()) {
+- { $_ -eq "latest" } {
+- $LatestVersionInfo = Get-Latest-Version-Info -AzureFeed $AzureFeed -Channel $Channel -Coherent $False
+- return $LatestVersionInfo.Version
+- }
+- { $_ -eq "coherent" } {
+- $LatestVersionInfo = Get-Latest-Version-Info -AzureFeed $AzureFeed -Channel $Channel -Coherent $True
+- return $LatestVersionInfo.Version
+- }
+- default { return $Version }
+- }
+- }
+- else {
+- return Parse-Jsonfile-For-Version $JSonFile
+- }
+-}
+-
+-function Get-Download-Link([string]$AzureFeed, [string]$SpecificVersion, [string]$CLIArchitecture) {
+- Say-Invocation $MyInvocation
+-
+- # If anything fails in this lookup it will default to $SpecificVersion
+- $SpecificProductVersion = Get-Product-Version -AzureFeed $AzureFeed -SpecificVersion $SpecificVersion
+-
+- if ($Runtime -eq "dotnet") {
+- $PayloadURL = "$AzureFeed/Runtime/$SpecificVersion/dotnet-runtime-$SpecificProductVersion-win-$CLIArchitecture.zip"
+- }
+- elseif ($Runtime -eq "aspnetcore") {
+- $PayloadURL = "$AzureFeed/aspnetcore/Runtime/$SpecificVersion/aspnetcore-runtime-$SpecificProductVersion-win-$CLIArchitecture.zip"
+- }
+- elseif ($Runtime -eq "windowsdesktop") {
+- $PayloadURL = "$AzureFeed/Runtime/$SpecificVersion/windowsdesktop-runtime-$SpecificProductVersion-win-$CLIArchitecture.zip"
+- }
+- elseif (-not $Runtime) {
+- $PayloadURL = "$AzureFeed/Sdk/$SpecificVersion/dotnet-sdk-$SpecificProductVersion-win-$CLIArchitecture.zip"
+- }
+- else {
+- throw "Invalid value for `$Runtime"
+- }
+-
+- Say-Verbose "Constructed primary named payload URL: $PayloadURL"
+-
+- return $PayloadURL, $SpecificProductVersion
+-}
+-
+-function Get-LegacyDownload-Link([string]$AzureFeed, [string]$SpecificVersion, [string]$CLIArchitecture) {
+- Say-Invocation $MyInvocation
+-
+- if (-not $Runtime) {
+- $PayloadURL = "$AzureFeed/Sdk/$SpecificVersion/dotnet-dev-win-$CLIArchitecture.$SpecificVersion.zip"
+- }
+- elseif ($Runtime -eq "dotnet") {
+- $PayloadURL = "$AzureFeed/Runtime/$SpecificVersion/dotnet-win-$CLIArchitecture.$SpecificVersion.zip"
+- }
+- else {
+- return $null
+- }
+-
+- Say-Verbose "Constructed legacy named payload URL: $PayloadURL"
+-
+- return $PayloadURL
+-}
+-
+-function Get-Product-Version([string]$AzureFeed, [string]$SpecificVersion) {
+- Say-Invocation $MyInvocation
+-
+- if ($Runtime -eq "dotnet") {
+- $ProductVersionTxtURL = "$AzureFeed/Runtime/$SpecificVersion/productVersion.txt"
+- }
+- elseif ($Runtime -eq "aspnetcore") {
+- $ProductVersionTxtURL = "$AzureFeed/aspnetcore/Runtime/$SpecificVersion/productVersion.txt"
+- }
+- elseif ($Runtime -eq "windowsdesktop") {
+- $ProductVersionTxtURL = "$AzureFeed/Runtime/$SpecificVersion/productVersion.txt"
+- }
+- elseif (-not $Runtime) {
+- $ProductVersionTxtURL = "$AzureFeed/Sdk/$SpecificVersion/productVersion.txt"
+- }
+- else {
+- throw "Invalid value specified for `$Runtime"
+- }
+-
+- Say-Verbose "Checking for existence of $ProductVersionTxtURL"
+-
+- try {
+- $productVersionResponse = GetHTTPResponse($productVersionTxtUrl)
+-
+- if ($productVersionResponse.StatusCode -eq 200) {
+- $productVersion = $productVersionResponse.Content.ReadAsStringAsync().Result.Trim()
+- if ($productVersion -ne $SpecificVersion)
+- {
+- Say "Using alternate version $productVersion found in $ProductVersionTxtURL"
+- }
+-
+- return $productVersion
+- }
+- else {
+- Say-Verbose "Got StatusCode $($productVersionResponse.StatusCode) trying to get productVersion.txt at $productVersionTxtUrl, so using default value of $SpecificVersion"
+- $productVersion = $SpecificVersion
+- }
+- } catch {
+- Say-Verbose "Could not read productVersion.txt at $productVersionTxtUrl, so using default value of $SpecificVersion"
+- $productVersion = $SpecificVersion
+- }
+-
+- return $productVersion
+-}
+-
+-function Get-User-Share-Path() {
+- Say-Invocation $MyInvocation
+-
+- $InstallRoot = $env:DOTNET_INSTALL_DIR
+- if (!$InstallRoot) {
+- $InstallRoot = "$env:LocalAppData\Microsoft\dotnet"
+- }
+- return $InstallRoot
+-}
+-
+-function Resolve-Installation-Path([string]$InstallDir) {
+- Say-Invocation $MyInvocation
+-
+- if ($InstallDir -eq "") {
+- return Get-User-Share-Path
+- }
+- return $InstallDir
+-}
+-
+-function Is-Dotnet-Package-Installed([string]$InstallRoot, [string]$RelativePathToPackage, [string]$SpecificVersion) {
+- Say-Invocation $MyInvocation
+-
+- $DotnetPackagePath = Join-Path -Path $InstallRoot -ChildPath $RelativePathToPackage | Join-Path -ChildPath $SpecificVersion
+- Say-Verbose "Is-Dotnet-Package-Installed: DotnetPackagePath=$DotnetPackagePath"
+- return Test-Path $DotnetPackagePath -PathType Container
+-}
+-
+-function Get-Absolute-Path([string]$RelativeOrAbsolutePath) {
+- # Too much spam
+- # Say-Invocation $MyInvocation
+-
+- return $ExecutionContext.SessionState.Path.GetUnresolvedProviderPathFromPSPath($RelativeOrAbsolutePath)
+-}
+-
+-function Get-Path-Prefix-With-Version($path) {
+- $match = [regex]::match($path, $VersionRegEx)
+- if ($match.Success) {
+- return $entry.FullName.Substring(0, $match.Index + $match.Length)
+- }
+-
+- return $null
+-}
+-
+-function Get-List-Of-Directories-And-Versions-To-Unpack-From-Dotnet-Package([System.IO.Compression.ZipArchive]$Zip, [string]$OutPath) {
+- Say-Invocation $MyInvocation
+-
+- $ret = @()
+- foreach ($entry in $Zip.Entries) {
+- $dir = Get-Path-Prefix-With-Version $entry.FullName
+- if ($dir -ne $null) {
+- $path = Get-Absolute-Path $(Join-Path -Path $OutPath -ChildPath $dir)
+- if (-Not (Test-Path $path -PathType Container)) {
+- $ret += $dir
+- }
+- }
+- }
+-
+- $ret = $ret | Sort-Object | Get-Unique
+-
+- $values = ($ret | foreach { "$_" }) -join ";"
+- Say-Verbose "Directories to unpack: $values"
+-
+- return $ret
+-}
+-
+-# Example zip content and extraction algorithm:
+-# Rule: files if extracted are always being extracted to the same relative path locally
+-# .\
+-# a.exe # file does not exist locally, extract
+-# b.dll # file exists locally, override only if $OverrideFiles set
+-# aaa\ # same rules as for files
+-# ...
+-# abc\1.0.0\ # directory contains version and exists locally
+-# ... # do not extract content under versioned part
+-# abc\asd\ # same rules as for files
+-# ...
+-# def\ghi\1.0.1\ # directory contains version and does not exist locally
+-# ... # extract content
+-function Extract-Dotnet-Package([string]$ZipPath, [string]$OutPath) {
+- Say-Invocation $MyInvocation
+-
+- Load-Assembly -Assembly System.IO.Compression.FileSystem
+- Set-Variable -Name Zip
+- try {
+- $Zip = [System.IO.Compression.ZipFile]::OpenRead($ZipPath)
+-
+- $DirectoriesToUnpack = Get-List-Of-Directories-And-Versions-To-Unpack-From-Dotnet-Package -Zip $Zip -OutPath $OutPath
+-
+- foreach ($entry in $Zip.Entries) {
+- $PathWithVersion = Get-Path-Prefix-With-Version $entry.FullName
+- if (($PathWithVersion -eq $null) -Or ($DirectoriesToUnpack -contains $PathWithVersion)) {
+- $DestinationPath = Get-Absolute-Path $(Join-Path -Path $OutPath -ChildPath $entry.FullName)
+- $DestinationDir = Split-Path -Parent $DestinationPath
+- $OverrideFiles=$OverrideNonVersionedFiles -Or (-Not (Test-Path $DestinationPath))
+- if ((-Not $DestinationPath.EndsWith("\")) -And $OverrideFiles) {
+- New-Item -ItemType Directory -Force -Path $DestinationDir | Out-Null
+- [System.IO.Compression.ZipFileExtensions]::ExtractToFile($entry, $DestinationPath, $OverrideNonVersionedFiles)
+- }
+- }
+- }
+- }
+- finally {
+- if ($Zip -ne $null) {
+- $Zip.Dispose()
+- }
+- }
+-}
+-
+-function DownloadFile($Source, [string]$OutPath) {
+- if ($Source -notlike "http*") {
+- # Using System.IO.Path.GetFullPath to get the current directory
+- # does not work in this context - $pwd gives the current directory
+- if (![System.IO.Path]::IsPathRooted($Source)) {
+- $Source = $(Join-Path -Path $pwd -ChildPath $Source)
+- }
+- $Source = Get-Absolute-Path $Source
+- Say "Copying file from $Source to $OutPath"
+- Copy-Item $Source $OutPath
+- return
+- }
+-
+- $Stream = $null
+-
+- try {
+- $Response = GetHTTPResponse -Uri $Source
+- $Stream = $Response.Content.ReadAsStreamAsync().Result
+- $File = [System.IO.File]::Create($OutPath)
+- $Stream.CopyTo($File)
+- $File.Close()
+- }
+- finally {
+- if ($Stream -ne $null) {
+- $Stream.Dispose()
+- }
+- }
+-}
+-
+-function Prepend-Sdk-InstallRoot-To-Path([string]$InstallRoot, [string]$BinFolderRelativePath) {
+- $BinPath = Get-Absolute-Path $(Join-Path -Path $InstallRoot -ChildPath $BinFolderRelativePath)
+- if (-Not $NoPath) {
+- $SuffixedBinPath = "$BinPath;"
+- if (-Not $env:path.Contains($SuffixedBinPath)) {
+- Say "Adding to current process PATH: `"$BinPath`". Note: This change will not be visible if PowerShell was run as a child process."
+- $env:path = $SuffixedBinPath + $env:path
+- } else {
+- Say-Verbose "Current process PATH already contains `"$BinPath`""
+- }
+- }
+- else {
+- Say "Binaries of dotnet can be found in $BinPath"
+- }
+-}
+-
+-$CLIArchitecture = Get-CLIArchitecture-From-Architecture $Architecture
+-$SpecificVersion = Get-Specific-Version-From-Version -AzureFeed $AzureFeed -Channel $Channel -Version $Version -JSonFile $JSonFile
+-$DownloadLink, $EffectiveVersion = Get-Download-Link -AzureFeed $AzureFeed -SpecificVersion $SpecificVersion -CLIArchitecture $CLIArchitecture
+-$LegacyDownloadLink = Get-LegacyDownload-Link -AzureFeed $AzureFeed -SpecificVersion $SpecificVersion -CLIArchitecture $CLIArchitecture
+-
+-$InstallRoot = Resolve-Installation-Path $InstallDir
+-Say-Verbose "InstallRoot: $InstallRoot"
+-$ScriptName = $MyInvocation.MyCommand.Name
+-
+-if ($DryRun) {
+- Say "Payload URLs:"
+- Say "Primary named payload URL: $DownloadLink"
+- if ($LegacyDownloadLink) {
+- Say "Legacy named payload URL: $LegacyDownloadLink"
+- }
+- $RepeatableCommand = ".\$ScriptName -Version `"$SpecificVersion`" -InstallDir `"$InstallRoot`" -Architecture `"$CLIArchitecture`""
+- if ($Runtime -eq "dotnet") {
+- $RepeatableCommand+=" -Runtime `"dotnet`""
+- }
+- elseif ($Runtime -eq "aspnetcore") {
+- $RepeatableCommand+=" -Runtime `"aspnetcore`""
+- }
+- foreach ($key in $MyInvocation.BoundParameters.Keys) {
+- if (-not (@("Architecture","Channel","DryRun","InstallDir","Runtime","SharedRuntime","Version") -contains $key)) {
+- $RepeatableCommand+=" -$key `"$($MyInvocation.BoundParameters[$key])`""
+- }
+- }
+- Say "Repeatable invocation: $RepeatableCommand"
+- exit 0
+-}
+-
+-if ($Runtime -eq "dotnet") {
+- $assetName = ".NET Core Runtime"
+- $dotnetPackageRelativePath = "shared\Microsoft.NETCore.App"
+-}
+-elseif ($Runtime -eq "aspnetcore") {
+- $assetName = "ASP.NET Core Runtime"
+- $dotnetPackageRelativePath = "shared\Microsoft.AspNetCore.App"
+-}
+-elseif ($Runtime -eq "windowsdesktop") {
+- $assetName = ".NET Core Windows Desktop Runtime"
+- $dotnetPackageRelativePath = "shared\Microsoft.WindowsDesktop.App"
+-}
+-elseif (-not $Runtime) {
+- $assetName = ".NET Core SDK"
+- $dotnetPackageRelativePath = "sdk"
+-}
+-else {
+- throw "Invalid value for `$Runtime"
+-}
+-
+-if ($SpecificVersion -ne $EffectiveVersion)
+-{
+- Say "Performing installation checks for effective version: $EffectiveVersion"
+- $SpecificVersion = $EffectiveVersion
+-}
+-
+-# Check if the SDK version is already installed.
+-$isAssetInstalled = Is-Dotnet-Package-Installed -InstallRoot $InstallRoot -RelativePathToPackage $dotnetPackageRelativePath -SpecificVersion $SpecificVersion
+-if ($isAssetInstalled) {
+- Say "$assetName version $SpecificVersion is already installed."
+- Prepend-Sdk-InstallRoot-To-Path -InstallRoot $InstallRoot -BinFolderRelativePath $BinFolderRelativePath
+- exit 0
+-}
+-
+-New-Item -ItemType Directory -Force -Path $InstallRoot | Out-Null
+-
+-$installDrive = $((Get-Item $InstallRoot).PSDrive.Name);
+-$diskInfo = Get-PSDrive -Name $installDrive
+-if ($diskInfo.Free / 1MB -le 100) {
+- Say "There is not enough disk space on drive ${installDrive}:"
+- exit 0
+-}
+-
+-$ZipPath = [System.IO.Path]::combine([System.IO.Path]::GetTempPath(), [System.IO.Path]::GetRandomFileName())
+-Say-Verbose "Zip path: $ZipPath"
+-
+-$DownloadFailed = $false
+-Say "Downloading link: $DownloadLink"
+-try {
+- DownloadFile -Source $DownloadLink -OutPath $ZipPath
+-}
+-catch {
+- Say "Cannot download: $DownloadLink"
+- if ($LegacyDownloadLink) {
+- $DownloadLink = $LegacyDownloadLink
+- $ZipPath = [System.IO.Path]::combine([System.IO.Path]::GetTempPath(), [System.IO.Path]::GetRandomFileName())
+- Say-Verbose "Legacy zip path: $ZipPath"
+- Say "Downloading legacy link: $DownloadLink"
+- try {
+- DownloadFile -Source $DownloadLink -OutPath $ZipPath
+- }
+- catch {
+- Say "Cannot download: $DownloadLink"
+- $DownloadFailed = $true
+- }
+- }
+- else {
+- $DownloadFailed = $true
+- }
+-}
+-
+-if ($DownloadFailed) {
+- throw "Could not find/download: `"$assetName`" with version = $SpecificVersion`nRefer to: https://aka.ms/dotnet-os-lifecycle for information on .NET Core support"
+-}
+-
+-Say "Extracting zip from $DownloadLink"
+-Extract-Dotnet-Package -ZipPath $ZipPath -OutPath $InstallRoot
+-
+-# Check if the SDK version is installed; if not, fail the installation.
+-$isAssetInstalled = $false
+-
+-# if the version contains "RTM" or "servicing"; check if a 'release-type' SDK version is installed.
+-if ($SpecificVersion -Match "rtm" -or $SpecificVersion -Match "servicing") {
+- $ReleaseVersion = $SpecificVersion.Split("-")[0]
+- Say-Verbose "Checking installation: version = $ReleaseVersion"
+- $isAssetInstalled = Is-Dotnet-Package-Installed -InstallRoot $InstallRoot -RelativePathToPackage $dotnetPackageRelativePath -SpecificVersion $ReleaseVersion
+-}
+-
+-# Check if the SDK version is installed.
+-if (!$isAssetInstalled) {
+- Say-Verbose "Checking installation: version = $SpecificVersion"
+- $isAssetInstalled = Is-Dotnet-Package-Installed -InstallRoot $InstallRoot -RelativePathToPackage $dotnetPackageRelativePath -SpecificVersion $SpecificVersion
+-}
+-
+-if (!$isAssetInstalled) {
+- throw "`"$assetName`" with version = $SpecificVersion failed to install with an unknown error."
+-}
+-
+-Remove-Item $ZipPath
+-
+-Prepend-Sdk-InstallRoot-To-Path -InstallRoot $InstallRoot -BinFolderRelativePath $BinFolderRelativePath
+-
+-Say "Installation finished"
+-exit 0
+\ No newline at end of file
+diff --git a/eng/common/dotnet-install-scripts/dotnet-install.sh b/eng/common/dotnet-install-scripts/dotnet-install.sh
+deleted file mode 100644
+index 9216114..0000000
+--- a/eng/common/dotnet-install-scripts/dotnet-install.sh
++++ /dev/null
+@@ -1,1133 +0,0 @@
+-#!/usr/bin/env bash
+-# Copyright (c) .NET Foundation and contributors. All rights reserved.
+-# Licensed under the MIT license. See LICENSE file in the project root for full license information.
+-#
+-
+-# Stop script on NZEC
+-set -e
+-# Stop script if unbound variable found (use ${var:-} if intentional)
+-set -u
+-# By default cmd1 | cmd2 returns exit code of cmd2 regardless of cmd1 success
+-# This is causing it to fail
+-set -o pipefail
+-
+-# Use in the the functions: eval $invocation
+-invocation='say_verbose "Calling: ${yellow:-}${FUNCNAME[0]} ${green:-}$*${normal:-}"'
+-
+-# standard output may be used as a return value in the functions
+-# we need a way to write text on the screen in the functions so that
+-# it won't interfere with the return value.
+-# Exposing stream 3 as a pipe to standard output of the script itself
+-exec 3>&1
+-
+-# Setup some colors to use. These need to work in fairly limited shells, like the Ubuntu Docker container where there are only 8 colors.
+-# See if stdout is a terminal
+-if [ -t 1 ] && command -v tput > /dev/null; then
+- # see if it supports colors
+- ncolors=$(tput colors)
+- if [ -n "$ncolors" ] && [ $ncolors -ge 8 ]; then
+- bold="$(tput bold || echo)"
+- normal="$(tput sgr0 || echo)"
+- black="$(tput setaf 0 || echo)"
+- red="$(tput setaf 1 || echo)"
+- green="$(tput setaf 2 || echo)"
+- yellow="$(tput setaf 3 || echo)"
+- blue="$(tput setaf 4 || echo)"
+- magenta="$(tput setaf 5 || echo)"
+- cyan="$(tput setaf 6 || echo)"
+- white="$(tput setaf 7 || echo)"
+- fi
+-fi
+-
+-say_warning() {
+- printf "%b\n" "${yellow:-}dotnet_install: Warning: $1${normal:-}"
+-}
+-
+-say_err() {
+- printf "%b\n" "${red:-}dotnet_install: Error: $1${normal:-}" >&2
+-}
+-
+-say() {
+- # using stream 3 (defined in the beginning) to not interfere with stdout of functions
+- # which may be used as return value
+- printf "%b\n" "${cyan:-}dotnet-install:${normal:-} $1" >&3
+-}
+-
+-say_verbose() {
+- if [ "$verbose" = true ]; then
+- say "$1"
+- fi
+-}
+-
+-# This platform list is finite - if the SDK/Runtime has supported Linux distribution-specific assets,
+-# then and only then should the Linux distribution appear in this list.
+-# Adding a Linux distribution to this list does not imply distribution-specific support.
+-get_legacy_os_name_from_platform() {
+- eval $invocation
+-
+- platform="$1"
+- case "$platform" in
+- "centos.7")
+- echo "centos"
+- return 0
+- ;;
+- "debian.8")
+- echo "debian"
+- return 0
+- ;;
+- "debian.9")
+- echo "debian.9"
+- return 0
+- ;;
+- "fedora.23")
+- echo "fedora.23"
+- return 0
+- ;;
+- "fedora.24")
+- echo "fedora.24"
+- return 0
+- ;;
+- "fedora.27")
+- echo "fedora.27"
+- return 0
+- ;;
+- "fedora.28")
+- echo "fedora.28"
+- return 0
+- ;;
+- "opensuse.13.2")
+- echo "opensuse.13.2"
+- return 0
+- ;;
+- "opensuse.42.1")
+- echo "opensuse.42.1"
+- return 0
+- ;;
+- "opensuse.42.3")
+- echo "opensuse.42.3"
+- return 0
+- ;;
+- "rhel.7"*)
+- echo "rhel"
+- return 0
+- ;;
+- "ubuntu.14.04")
+- echo "ubuntu"
+- return 0
+- ;;
+- "ubuntu.16.04")
+- echo "ubuntu.16.04"
+- return 0
+- ;;
+- "ubuntu.16.10")
+- echo "ubuntu.16.10"
+- return 0
+- ;;
+- "ubuntu.18.04")
+- echo "ubuntu.18.04"
+- return 0
+- ;;
+- "alpine.3.4.3")
+- echo "alpine"
+- return 0
+- ;;
+- esac
+- return 1
+-}
+-
+-get_linux_platform_name() {
+- eval $invocation
+-
+- if [ -n "$runtime_id" ]; then
+- echo "${runtime_id%-*}"
+- return 0
+- else
+- if [ -e /etc/os-release ]; then
+- . /etc/os-release
+- echo "$ID${VERSION_ID:+.${VERSION_ID}}"
+- return 0
+- elif [ -e /etc/redhat-release ]; then
+- local redhatRelease=$(</etc/redhat-release)
+- if [[ $redhatRelease == "CentOS release 6."* || $redhatRelease == "Red Hat Enterprise Linux Server release 6."* ]]; then
+- echo "rhel.6"
+- return 0
+- else
+- echo "rhel"
+- return 0
+- fi
+- fi
+- fi
+-
+- say_verbose "Linux specific platform name and version could not be detected: UName = $uname"
+- return 1
+-}
+-
+-is_musl_based_distro() {
+- (ldd --version 2>&1 || true) | grep -q musl
+-}
+-
+-get_current_os_name() {
+- eval $invocation
+-
+- local uname=$(uname)
+- if [ "$uname" = "Darwin" ]; then
+- echo "osx"
+- return 0
+- elif [ "$uname" = "FreeBSD" ]; then
+- echo "freebsd"
+- return 0
+- elif [ "$uname" = "Linux" ]; then
+- local linux_platform_name
+- linux_platform_name="$(get_linux_platform_name)" || { echo "linux" && return 0 ; }
+-
+- if [ "$linux_platform_name" = "rhel.6" ]; then
+- echo $linux_platform_name
+- return 0
+- elif is_musl_based_distro; then
+- echo "linux-musl"
+- return 0
+- else
+- echo "linux"
+- return 0
+- fi
+- fi
+-
+- say_err "OS name could not be detected: UName = $uname"
+- return 1
+-}
+-
+-get_legacy_os_name() {
+- eval $invocation
+-
+- local uname=$(uname)
+- if [ "$uname" = "Darwin" ]; then
+- echo "osx"
+- return 0
+- elif [ -n "$runtime_id" ]; then
+- echo $(get_legacy_os_name_from_platform "${runtime_id%-*}" || echo "${runtime_id%-*}")
+- return 0
+- else
+- if [ -e /etc/os-release ]; then
+- . /etc/os-release
+- os=$(get_legacy_os_name_from_platform "$ID${VERSION_ID:+.${VERSION_ID}}" || echo "")
+- if [ -n "$os" ]; then
+- echo "$os"
+- return 0
+- fi
+- fi
+- fi
+-
+- say_verbose "Distribution specific OS name and version could not be detected: UName = $uname"
+- return 1
+-}
+-
+-machine_has() {
+- eval $invocation
+-
+- hash "$1" > /dev/null 2>&1
+- return $?
+-}
+-
+-
+-check_min_reqs() {
+- local hasMinimum=false
+- if machine_has "curl"; then
+- hasMinimum=true
+- elif machine_has "wget"; then
+- hasMinimum=true
+- fi
+-
+- if [ "$hasMinimum" = "false" ]; then
+- say_err "curl (recommended) or wget are required to download dotnet. Install missing prerequisite to proceed."
+- return 1
+- fi
+- return 0
+-}
+-
+-check_pre_reqs() {
+- eval $invocation
+-
+- if [ "${DOTNET_INSTALL_SKIP_PREREQS:-}" = "1" ]; then
+- return 0
+- fi
+-
+- if [ "$(uname)" = "Linux" ]; then
+- if is_musl_based_distro; then
+- if ! command -v scanelf > /dev/null; then
+- say_warning "scanelf not found, please install pax-utils package."
+- return 0
+- fi
+- LDCONFIG_COMMAND="scanelf --ldpath -BF '%f'"
+- [ -z "$($LDCONFIG_COMMAND 2>/dev/null | grep libintl)" ] && say_warning "Unable to locate libintl. Probable prerequisite missing; install libintl (or gettext)."
+- else
+- if [ ! -x "$(command -v ldconfig)" ]; then
+- say_verbose "ldconfig is not in PATH, trying /sbin/ldconfig."
+- LDCONFIG_COMMAND="/sbin/ldconfig"
+- else
+- LDCONFIG_COMMAND="ldconfig"
+- fi
+- local librarypath=${LD_LIBRARY_PATH:-}
+- LDCONFIG_COMMAND="$LDCONFIG_COMMAND -NXv ${librarypath//:/ }"
+- fi
+-
+- [ -z "$($LDCONFIG_COMMAND 2>/dev/null | grep zlib)" ] && say_warning "Unable to locate zlib. Probable prerequisite missing; install zlib."
+- [ -z "$($LDCONFIG_COMMAND 2>/dev/null | grep ssl)" ] && say_warning "Unable to locate libssl. Probable prerequisite missing; install libssl."
+- [ -z "$($LDCONFIG_COMMAND 2>/dev/null | grep libicu)" ] && say_warning "Unable to locate libicu. Probable prerequisite missing; install libicu."
+- [ -z "$($LDCONFIG_COMMAND 2>/dev/null | grep lttng)" ] && say_warning "Unable to locate liblttng. Probable prerequisite missing; install libcurl."
+- [ -z "$($LDCONFIG_COMMAND 2>/dev/null | grep libcurl)" ] && say_warning "Unable to locate libcurl. Probable prerequisite missing; install libcurl."
+- fi
+-
+- return 0
+-}
+-
+-# args:
+-# input - $1
+-to_lowercase() {
+- #eval $invocation
+-
+- echo "$1" | tr '[:upper:]' '[:lower:]'
+- return 0
+-}
+-
+-# args:
+-# input - $1
+-remove_trailing_slash() {
+- #eval $invocation
+-
+- local input="${1:-}"
+- echo "${input%/}"
+- return 0
+-}
+-
+-# args:
+-# input - $1
+-remove_beginning_slash() {
+- #eval $invocation
+-
+- local input="${1:-}"
+- echo "${input#/}"
+- return 0
+-}
+-
+-# args:
+-# root_path - $1
+-# child_path - $2 - this parameter can be empty
+-combine_paths() {
+- eval $invocation
+-
+- # TODO: Consider making it work with any number of paths. For now:
+- if [ ! -z "${3:-}" ]; then
+- say_err "combine_paths: Function takes two parameters."
+- return 1
+- fi
+-
+- local root_path="$(remove_trailing_slash "$1")"
+- local child_path="$(remove_beginning_slash "${2:-}")"
+- say_verbose "combine_paths: root_path=$root_path"
+- say_verbose "combine_paths: child_path=$child_path"
+- echo "$root_path/$child_path"
+- return 0
+-}
+-
+-get_machine_architecture() {
+- eval $invocation
+-
+- if command -v uname > /dev/null; then
+- CPUName=$(uname -m)
+- case $CPUName in
+- armv7l)
+- echo "arm"
+- return 0
+- ;;
+- aarch64)
+- echo "arm64"
+- return 0
+- ;;
+- esac
+- fi
+-
+- # Always default to 'x64'
+- echo "x64"
+- return 0
+-}
+-
+-# args:
+-# architecture - $1
+-get_normalized_architecture_from_architecture() {
+- eval $invocation
+-
+- local architecture="$(to_lowercase "$1")"
+- case "$architecture" in
+- \<auto\>)
+- echo "$(get_normalized_architecture_from_architecture "$(get_machine_architecture)")"
+- return 0
+- ;;
+- amd64|x64)
+- echo "x64"
+- return 0
+- ;;
+- arm)
+- echo "arm"
+- return 0
+- ;;
+- arm64)
+- echo "arm64"
+- return 0
+- ;;
+- esac
+-
+- say_err "Architecture \`$architecture\` not supported. If you think this is a bug, report it at https://github.com/dotnet/sdk/issues"
+- return 1
+-}
+-
+-# The version text returned from the feeds is a 1-line or 2-line string:
+-# For the SDK and the dotnet runtime (2 lines):
+-# Line 1: # commit_hash
+-# Line 2: # 4-part version
+-# For the aspnetcore runtime (1 line):
+-# Line 1: # 4-part version
+-
+-# args:
+-# version_text - stdin
+-get_version_from_version_info() {
+- eval $invocation
+-
+- cat | tail -n 1 | sed 's/\r$//'
+- return 0
+-}
+-
+-# args:
+-# install_root - $1
+-# relative_path_to_package - $2
+-# specific_version - $3
+-is_dotnet_package_installed() {
+- eval $invocation
+-
+- local install_root="$1"
+- local relative_path_to_package="$2"
+- local specific_version="${3//[$'\t\r\n']}"
+-
+- local dotnet_package_path="$(combine_paths "$(combine_paths "$install_root" "$relative_path_to_package")" "$specific_version")"
+- say_verbose "is_dotnet_package_installed: dotnet_package_path=$dotnet_package_path"
+-
+- if [ -d "$dotnet_package_path" ]; then
+- return 0
+- else
+- return 1
+- fi
+-}
+-
+-# args:
+-# azure_feed - $1
+-# channel - $2
+-# normalized_architecture - $3
+-# coherent - $4
+-get_latest_version_info() {
+- eval $invocation
+-
+- local azure_feed="$1"
+- local channel="$2"
+- local normalized_architecture="$3"
+- local coherent="$4"
+-
+- local version_file_url=null
+- if [[ "$runtime" == "dotnet" ]]; then
+- version_file_url="$uncached_feed/Runtime/$channel/latest.version"
+- elif [[ "$runtime" == "aspnetcore" ]]; then
+- version_file_url="$uncached_feed/aspnetcore/Runtime/$channel/latest.version"
+- elif [ -z "$runtime" ]; then
+- if [ "$coherent" = true ]; then
+- version_file_url="$uncached_feed/Sdk/$channel/latest.coherent.version"
+- else
+- version_file_url="$uncached_feed/Sdk/$channel/latest.version"
+- fi
+- else
+- say_err "Invalid value for \$runtime"
+- return 1
+- fi
+- say_verbose "get_latest_version_info: latest url: $version_file_url"
+-
+- download "$version_file_url"
+- return $?
+-}
+-
+-# args:
+-# json_file - $1
+-parse_jsonfile_for_version() {
+- eval $invocation
+-
+- local json_file="$1"
+- if [ ! -f "$json_file" ]; then
+- say_err "Unable to find \`$json_file\`"
+- return 1
+- fi
+-
+- sdk_section=$(cat $json_file | awk '/"sdk"/,/}/')
+- if [ -z "$sdk_section" ]; then
+- say_err "Unable to parse the SDK node in \`$json_file\`"
+- return 1
+- fi
+-
+- sdk_list=$(echo $sdk_section | awk -F"[{}]" '{print $2}')
+- sdk_list=${sdk_list//[\" ]/}
+- sdk_list=${sdk_list//,/$'\n'}
+- sdk_list="$(echo -e "${sdk_list}" | tr -d '[[:space:]]')"
+-
+- local version_info=""
+- while read -r line; do
+- IFS=:
+- while read -r key value; do
+- if [[ "$key" == "version" ]]; then
+- version_info=$value
+- fi
+- done <<< "$line"
+- done <<< "$sdk_list"
+- if [ -z "$version_info" ]; then
+- say_err "Unable to find the SDK:version node in \`$json_file\`"
+- return 1
+- fi
+-
+- unset IFS;
+- echo "$version_info"
+- return 0
+-}
+-
+-# args:
+-# azure_feed - $1
+-# channel - $2
+-# normalized_architecture - $3
+-# version - $4
+-# json_file - $5
+-get_specific_version_from_version() {
+- eval $invocation
+-
+- local azure_feed="$1"
+- local channel="$2"
+- local normalized_architecture="$3"
+- local version="$(to_lowercase "$4")"
+- local json_file="$5"
+-
+- if [ -z "$json_file" ]; then
+- case "$version" in
+- latest)
+- local version_info
+- version_info="$(get_latest_version_info "$azure_feed" "$channel" "$normalized_architecture" false)" || return 1
+- say_verbose "get_specific_version_from_version: version_info=$version_info"
+- echo "$version_info" | get_version_from_version_info
+- return 0
+- ;;
+- coherent)
+- local version_info
+- version_info="$(get_latest_version_info "$azure_feed" "$channel" "$normalized_architecture" true)" || return 1
+- say_verbose "get_specific_version_from_version: version_info=$version_info"
+- echo "$version_info" | get_version_from_version_info
+- return 0
+- ;;
+- *)
+- echo "$version"
+- return 0
+- ;;
+- esac
+- else
+- local version_info
+- version_info="$(parse_jsonfile_for_version "$json_file")" || return 1
+- echo "$version_info"
+- return 0
+- fi
+-}
+-
+-# args:
+-# azure_feed - $1
+-# channel - $2
+-# normalized_architecture - $3
+-# specific_version - $4
+-construct_download_link() {
+- eval $invocation
+-
+- local azure_feed="$1"
+- local channel="$2"
+- local normalized_architecture="$3"
+- local specific_version="${4//[$'\t\r\n']}"
+- local specific_product_version="$(get_specific_product_version "$1" "$4")"
+-
+- local osname
+- osname="$(get_current_os_name)" || return 1
+-
+- local download_link=null
+- if [[ "$runtime" == "dotnet" ]]; then
+- download_link="$azure_feed/Runtime/$specific_version/dotnet-runtime-$specific_product_version-$osname-$normalized_architecture.tar.gz"
+- elif [[ "$runtime" == "aspnetcore" ]]; then
+- download_link="$azure_feed/aspnetcore/Runtime/$specific_version/aspnetcore-runtime-$specific_product_version-$osname-$normalized_architecture.tar.gz"
+- elif [ -z "$runtime" ]; then
+- download_link="$azure_feed/Sdk/$specific_version/dotnet-sdk-$specific_product_version-$osname-$normalized_architecture.tar.gz"
+- else
+- return 1
+- fi
+-
+- echo "$download_link"
+- return 0
+-}
+-
+-# args:
+-# azure_feed - $1
+-# specific_version - $2
+-get_specific_product_version() {
+- # If we find a 'productVersion.txt' at the root of any folder, we'll use its contents
+- # to resolve the version of what's in the folder, superseding the specified version.
+- eval $invocation
+-
+- local azure_feed="$1"
+- local specific_version="${2//[$'\t\r\n']}"
+- local specific_product_version=$specific_version
+-
+- local download_link=null
+- if [[ "$runtime" == "dotnet" ]]; then
+- download_link="$azure_feed/Runtime/$specific_version/productVersion.txt${feed_credential}"
+- elif [[ "$runtime" == "aspnetcore" ]]; then
+- download_link="$azure_feed/aspnetcore/Runtime/$specific_version/productVersion.txt${feed_credential}"
+- elif [ -z "$runtime" ]; then
+- download_link="$azure_feed/Sdk/$specific_version/productVersion.txt${feed_credential}"
+- else
+- return 1
+- fi
+-
+- specific_product_version=$(curl -s --fail "$download_link")
+- if [ $? -ne 0 ]
+- then
+- specific_product_version=$(wget -qO- "$download_link")
+- if [ $? -ne 0 ]
+- then
+- specific_product_version=$specific_version
+- fi
+- fi
+- specific_product_version="${specific_product_version//[$'\t\r\n']}"
+-
+- echo "$specific_product_version"
+- return 0
+-}
+-
+-# args:
+-# azure_feed - $1
+-# channel - $2
+-# normalized_architecture - $3
+-# specific_version - $4
+-construct_legacy_download_link() {
+- eval $invocation
+-
+- local azure_feed="$1"
+- local channel="$2"
+- local normalized_architecture="$3"
+- local specific_version="${4//[$'\t\r\n']}"
+-
+- local distro_specific_osname
+- distro_specific_osname="$(get_legacy_os_name)" || return 1
+-
+- local legacy_download_link=null
+- if [[ "$runtime" == "dotnet" ]]; then
+- legacy_download_link="$azure_feed/Runtime/$specific_version/dotnet-$distro_specific_osname-$normalized_architecture.$specific_version.tar.gz"
+- elif [ -z "$runtime" ]; then
+- legacy_download_link="$azure_feed/Sdk/$specific_version/dotnet-dev-$distro_specific_osname-$normalized_architecture.$specific_version.tar.gz"
+- else
+- return 1
+- fi
+-
+- echo "$legacy_download_link"
+- return 0
+-}
+-
+-get_user_install_path() {
+- eval $invocation
+-
+- if [ ! -z "${DOTNET_INSTALL_DIR:-}" ]; then
+- echo "$DOTNET_INSTALL_DIR"
+- else
+- echo "$HOME/.dotnet"
+- fi
+- return 0
+-}
+-
+-# args:
+-# install_dir - $1
+-resolve_installation_path() {
+- eval $invocation
+-
+- local install_dir=$1
+- if [ "$install_dir" = "" ]; then
+- local user_install_path="$(get_user_install_path)"
+- say_verbose "resolve_installation_path: user_install_path=$user_install_path"
+- echo "$user_install_path"
+- return 0
+- fi
+-
+- echo "$install_dir"
+- return 0
+-}
+-
+-# args:
+-# relative_or_absolute_path - $1
+-get_absolute_path() {
+- eval $invocation
+-
+- local relative_or_absolute_path=$1
+- echo "$(cd "$(dirname "$1")" && pwd -P)/$(basename "$1")"
+- return 0
+-}
+-
+-# args:
+-# input_files - stdin
+-# root_path - $1
+-# out_path - $2
+-# override - $3
+-copy_files_or_dirs_from_list() {
+- eval $invocation
+-
+- local root_path="$(remove_trailing_slash "$1")"
+- local out_path="$(remove_trailing_slash "$2")"
+- local override="$3"
+- local osname="$(get_current_os_name)"
+- local override_switch=$(
+- if [ "$override" = false ]; then
+- if [ "$osname" = "linux-musl" ]; then
+- printf -- "-u";
+- else
+- printf -- "-n";
+- fi
+- fi)
+-
+- cat | uniq | while read -r file_path; do
+- local path="$(remove_beginning_slash "${file_path#$root_path}")"
+- local target="$out_path/$path"
+- if [ "$override" = true ] || (! ([ -d "$target" ] || [ -e "$target" ])); then
+- mkdir -p "$out_path/$(dirname "$path")"
+- if [ -d "$target" ]; then
+- rm -rf "$target"
+- fi
+- cp -R $override_switch "$root_path/$path" "$target"
+- fi
+- done
+-}
+-
+-# args:
+-# zip_path - $1
+-# out_path - $2
+-extract_dotnet_package() {
+- eval $invocation
+-
+- local zip_path="$1"
+- local out_path="$2"
+-
+- local temp_out_path="$(mktemp -d "$temporary_file_template")"
+-
+- local failed=false
+- tar -xzf "$zip_path" -C "$temp_out_path" > /dev/null || failed=true
+-
+- local folders_with_version_regex='^.*/[0-9]+\.[0-9]+[^/]+/'
+- find "$temp_out_path" -type f | grep -Eo "$folders_with_version_regex" | sort | copy_files_or_dirs_from_list "$temp_out_path" "$out_path" false
+- find "$temp_out_path" -type f | grep -Ev "$folders_with_version_regex" | copy_files_or_dirs_from_list "$temp_out_path" "$out_path" "$override_non_versioned_files"
+-
+- rm -rf "$temp_out_path"
+-
+- if [ "$failed" = true ]; then
+- say_err "Extraction failed"
+- return 1
+- fi
+-}
+-
+-# args:
+-# remote_path - $1
+-# [out_path] - $2 - stdout if not provided
+-download() {
+- eval $invocation
+-
+- local remote_path="$1"
+- local out_path="${2:-}"
+-
+- if [[ "$remote_path" != "http"* ]]; then
+- cp "$remote_path" "$out_path"
+- return $?
+- fi
+-
+- local failed=false
+- if machine_has "curl"; then
+- downloadcurl "$remote_path" "$out_path" || failed=true
+- elif machine_has "wget"; then
+- downloadwget "$remote_path" "$out_path" || failed=true
+- else
+- failed=true
+- fi
+- if [ "$failed" = true ]; then
+- say_verbose "Download failed: $remote_path"
+- return 1
+- fi
+- return 0
+-}
+-
+-downloadcurl() {
+- eval $invocation
+- local remote_path="$1"
+- local out_path="${2:-}"
+-
+- # Append feed_credential as late as possible before calling curl to avoid logging feed_credential
+- remote_path="${remote_path}${feed_credential}"
+-
+- local curl_options="--retry 20 --retry-delay 2 --connect-timeout 15 -sSL -f --create-dirs "
+- local failed=false
+- if [ -z "$out_path" ]; then
+- curl $curl_options "$remote_path" || failed=true
+- else
+- curl $curl_options -o "$out_path" "$remote_path" || failed=true
+- fi
+- if [ "$failed" = true ]; then
+- say_verbose "Curl download failed"
+- return 1
+- fi
+- return 0
+-}
+-
+-downloadwget() {
+- eval $invocation
+- local remote_path="$1"
+- local out_path="${2:-}"
+-
+- # Append feed_credential as late as possible before calling wget to avoid logging feed_credential
+- remote_path="${remote_path}${feed_credential}"
+- local wget_options="--tries 20 --waitretry 2 --connect-timeout 15 "
+- local failed=false
+- if [ -z "$out_path" ]; then
+- wget -q $wget_options -O - "$remote_path" || failed=true
+- else
+- wget $wget_options -O "$out_path" "$remote_path" || failed=true
+- fi
+- if [ "$failed" = true ]; then
+- say_verbose "Wget download failed"
+- return 1
+- fi
+- return 0
+-}
+-
+-calculate_vars() {
+- eval $invocation
+- valid_legacy_download_link=true
+-
+- normalized_architecture="$(get_normalized_architecture_from_architecture "$architecture")"
+- say_verbose "normalized_architecture=$normalized_architecture"
+-
+- specific_version="$(get_specific_version_from_version "$azure_feed" "$channel" "$normalized_architecture" "$version" "$json_file")"
+- specific_product_version="$(get_specific_product_version "$azure_feed" "$specific_version")"
+- say_verbose "specific_version=$specific_version"
+- if [ -z "$specific_version" ]; then
+- say_err "Could not resolve version information."
+- return 1
+- fi
+-
+- download_link="$(construct_download_link "$azure_feed" "$channel" "$normalized_architecture" "$specific_version")"
+- say_verbose "Constructed primary named payload URL: $download_link"
+-
+- legacy_download_link="$(construct_legacy_download_link "$azure_feed" "$channel" "$normalized_architecture" "$specific_version")" || valid_legacy_download_link=false
+-
+- if [ "$valid_legacy_download_link" = true ]; then
+- say_verbose "Constructed legacy named payload URL: $legacy_download_link"
+- else
+- say_verbose "Cound not construct a legacy_download_link; omitting..."
+- fi
+-
+- install_root="$(resolve_installation_path "$install_dir")"
+- say_verbose "InstallRoot: $install_root"
+-}
+-
+-install_dotnet() {
+- eval $invocation
+- local download_failed=false
+- local asset_name=''
+- local asset_relative_path=''
+-
+- if [[ "$runtime" == "dotnet" ]]; then
+- asset_relative_path="shared/Microsoft.NETCore.App"
+- asset_name=".NET Core Runtime"
+- elif [[ "$runtime" == "aspnetcore" ]]; then
+- asset_relative_path="shared/Microsoft.AspNetCore.App"
+- asset_name="ASP.NET Core Runtime"
+- elif [ -z "$runtime" ]; then
+- asset_relative_path="sdk"
+- asset_name=".NET Core SDK"
+- else
+- say_err "Invalid value for \$runtime"
+- return 1
+- fi
+-
+- # Check if the SDK version is already installed.
+- if is_dotnet_package_installed "$install_root" "$asset_relative_path" "$specific_version"; then
+- say "$asset_name version $specific_version is already installed."
+- return 0
+- fi
+-
+- mkdir -p "$install_root"
+- zip_path="$(mktemp "$temporary_file_template")"
+- say_verbose "Zip path: $zip_path"
+-
+- say "Downloading link: $download_link"
+-
+- # Failures are normal in the non-legacy case for ultimately legacy downloads.
+- # Do not output to stderr, since output to stderr is considered an error.
+- download "$download_link" "$zip_path" 2>&1 || download_failed=true
+-
+- # if the download fails, download the legacy_download_link
+- if [ "$download_failed" = true ]; then
+- say "Cannot download: $download_link"
+-
+- if [ "$valid_legacy_download_link" = true ]; then
+- download_failed=false
+- download_link="$legacy_download_link"
+- zip_path="$(mktemp "$temporary_file_template")"
+- say_verbose "Legacy zip path: $zip_path"
+- say "Downloading legacy link: $download_link"
+- download "$download_link" "$zip_path" 2>&1 || download_failed=true
+-
+- if [ "$download_failed" = true ]; then
+- say "Cannot download: $download_link"
+- fi
+- fi
+- fi
+-
+- if [ "$download_failed" = true ]; then
+- say_err "Could not find/download: \`$asset_name\` with version = $specific_version"
+- say_err "Refer to: https://aka.ms/dotnet-os-lifecycle for information on .NET Core support"
+- return 1
+- fi
+-
+- say "Extracting zip from $download_link"
+- extract_dotnet_package "$zip_path" "$install_root"
+-
+- # Check if the SDK version is installed; if not, fail the installation.
+- # if the version contains "RTM" or "servicing"; check if a 'release-type' SDK version is installed.
+- if [[ $specific_version == *"rtm"* || $specific_version == *"servicing"* ]]; then
+- IFS='-'
+- read -ra verArr <<< "$specific_version"
+- release_version="${verArr[0]}"
+- unset IFS;
+- say_verbose "Checking installation: version = $release_version"
+- if is_dotnet_package_installed "$install_root" "$asset_relative_path" "$release_version"; then
+- return 0
+- fi
+- fi
+-
+- # Check if the standard SDK version is installed.
+- say_verbose "Checking installation: version = $specific_product_version"
+- if is_dotnet_package_installed "$install_root" "$asset_relative_path" "$specific_product_version"; then
+- return 0
+- fi
+-
+- say_err "\`$asset_name\` with version = $specific_product_version failed to install with an unknown error."
+- return 1
+-}
+-
+-args=("$@")
+-
+-local_version_file_relative_path="/.version"
+-bin_folder_relative_path=""
+-temporary_file_template="${TMPDIR:-/tmp}/dotnet.XXXXXXXXX"
+-
+-channel="LTS"
+-version="Latest"
+-json_file=""
+-install_dir=""
+-architecture=""
+-dry_run=false
+-no_path=false
+-no_cdn=false
+-azure_feed="https://dotnetcli.azureedge.net/dotnet"
+-uncached_feed="https://dotnetcli.blob.core.windows.net/dotnet"
+-feed_credential=""
+-verbose=false
+-runtime=""
+-runtime_id=""
+-override_non_versioned_files=true
+-non_dynamic_parameters=""
+-
+-while [ $# -ne 0 ]
+-do
+- name="$1"
+- case "$name" in
+- -c|--channel|-[Cc]hannel)
+- shift
+- channel="$1"
+- ;;
+- -v|--version|-[Vv]ersion)
+- shift
+- version="$1"
+- ;;
+- -i|--install-dir|-[Ii]nstall[Dd]ir)
+- shift
+- install_dir="$1"
+- ;;
+- --arch|--architecture|-[Aa]rch|-[Aa]rchitecture)
+- shift
+- architecture="$1"
+- ;;
+- --shared-runtime|-[Ss]hared[Rr]untime)
+- say_warning "The --shared-runtime flag is obsolete and may be removed in a future version of this script. The recommended usage is to specify '--runtime dotnet'."
+- if [ -z "$runtime" ]; then
+- runtime="dotnet"
+- fi
+- ;;
+- --runtime|-[Rr]untime)
+- shift
+- runtime="$1"
+- if [[ "$runtime" != "dotnet" ]] && [[ "$runtime" != "aspnetcore" ]]; then
+- say_err "Unsupported value for --runtime: '$1'. Valid values are 'dotnet' and 'aspnetcore'."
+- if [[ "$runtime" == "windowsdesktop" ]]; then
+- say_err "WindowsDesktop archives are manufactured for Windows platforms only."
+- fi
+- exit 1
+- fi
+- ;;
+- --dry-run|-[Dd]ry[Rr]un)
+- dry_run=true
+- ;;
+- --no-path|-[Nn]o[Pp]ath)
+- no_path=true
+- non_dynamic_parameters+=" $name"
+- ;;
+- --verbose|-[Vv]erbose)
+- verbose=true
+- non_dynamic_parameters+=" $name"
+- ;;
+- --no-cdn|-[Nn]o[Cc]dn)
+- no_cdn=true
+- non_dynamic_parameters+=" $name"
+- ;;
+- --azure-feed|-[Aa]zure[Ff]eed)
+- shift
+- azure_feed="$1"
+- non_dynamic_parameters+=" $name "\""$1"\"""
+- ;;
+- --uncached-feed|-[Uu]ncached[Ff]eed)
+- shift
+- uncached_feed="$1"
+- non_dynamic_parameters+=" $name "\""$1"\"""
+- ;;
+- --feed-credential|-[Ff]eed[Cc]redential)
+- shift
+- feed_credential="$1"
+- non_dynamic_parameters+=" $name "\""$1"\"""
+- ;;
+- --runtime-id|-[Rr]untime[Ii]d)
+- shift
+- runtime_id="$1"
+- non_dynamic_parameters+=" $name "\""$1"\"""
+- ;;
+- --jsonfile|-[Jj][Ss]on[Ff]ile)
+- shift
+- json_file="$1"
+- ;;
+- --skip-non-versioned-files|-[Ss]kip[Nn]on[Vv]ersioned[Ff]iles)
+- override_non_versioned_files=false
+- non_dynamic_parameters+=" $name"
+- ;;
+- -?|--?|-h|--help|-[Hh]elp)
+- script_name="$(basename "$0")"
+- echo ".NET Tools Installer"
+- echo "Usage: $script_name [-c|--channel ] [-v|--version ] [-p|--prefix ]"
+- echo " $script_name -h|-?|--help"
+- echo ""
+- echo "$script_name is a simple command line interface for obtaining dotnet cli."
+- echo ""
+- echo "Options:"
+- echo " -c,--channel Download from the channel specified, Defaults to \`$channel\`."
+- echo " -Channel"
+- echo " Possible values:"
+- echo " - Current - most current release"
+- echo " - LTS - most current supported release"
+- echo " - 2-part version in a format A.B - represents a specific release"
+- echo " examples: 2.0; 1.0"
+- echo " - Branch name"
+- echo " examples: release/2.0.0; Master"
+- echo " Note: The version parameter overrides the channel parameter."
+- echo " -v,--version Use specific VERSION, Defaults to \`$version\`."
+- echo " -Version"
+- echo " Possible values:"
+- echo " - latest - most latest build on specific channel"
+- echo " - coherent - most latest coherent build on specific channel"
+- echo " coherent applies only to SDK downloads"
+- echo " - 3-part version in a format A.B.C - represents specific version of build"
+- echo " examples: 2.0.0-preview2-006120; 1.1.0"
+- echo " -i,--install-dir Install under specified location (see Install Location below)"
+- echo " -InstallDir"
+- echo " --architecture Architecture of dotnet binaries to be installed, Defaults to \`$architecture\`."
+- echo " --arch,-Architecture,-Arch"
+- echo " Possible values: x64, arm, and arm64"
+- echo " --runtime Installs a shared runtime only, without the SDK."
+- echo " -Runtime"
+- echo " Possible values:"
+- echo " - dotnet - the Microsoft.NETCore.App shared runtime"
+- echo " - aspnetcore - the Microsoft.AspNetCore.App shared runtime"
+- echo " --dry-run,-DryRun Do not perform installation. Display download link."
+- echo " --no-path, -NoPath Do not set PATH for the current process."
+- echo " --verbose,-Verbose Display diagnostics information."
+- echo " --azure-feed,-AzureFeed Azure feed location. Defaults to $azure_feed, This parameter typically is not changed by the user."
+- echo " --uncached-feed,-UncachedFeed Uncached feed location. This parameter typically is not changed by the user."
+- echo " --feed-credential,-FeedCredential Azure feed shared access token. This parameter typically is not specified."
+- echo " --skip-non-versioned-files Skips non-versioned files if they already exist, such as the dotnet executable."
+- echo " -SkipNonVersionedFiles"
+- echo " --no-cdn,-NoCdn Disable downloading from the Azure CDN, and use the uncached feed directly."
+- echo " --jsonfile Determines the SDK version from a user specified global.json file."
+- echo " Note: global.json must have a value for 'SDK:Version'"
+- echo " --runtime-id Installs the .NET Tools for the given platform (use linux-x64 for portable linux)."
+- echo " -RuntimeId"
+- echo " -?,--?,-h,--help,-Help Shows this help message"
+- echo ""
+- echo "Obsolete parameters:"
+- echo " --shared-runtime The recommended alternative is '--runtime dotnet'."
+- echo " This parameter is obsolete and may be removed in a future version of this script."
+- echo " Installs just the shared runtime bits, not the entire SDK."
+- echo ""
+- echo "Install Location:"
+- echo " Location is chosen in following order:"
+- echo " - --install-dir option"
+- echo " - Environmental variable DOTNET_INSTALL_DIR"
+- echo " - $HOME/.dotnet"
+- exit 0
+- ;;
+- *)
+- say_err "Unknown argument \`$name\`"
+- exit 1
+- ;;
+- esac
+-
+- shift
+-done
+-
+-if [ "$no_cdn" = true ]; then
+- azure_feed="$uncached_feed"
+-fi
+-
+-check_min_reqs
+-calculate_vars
+-script_name=$(basename "$0")
+-
+-if [ "$dry_run" = true ]; then
+- say "Payload URLs:"
+- say "Primary named payload URL: $download_link"
+- if [ "$valid_legacy_download_link" = true ]; then
+- say "Legacy named payload URL: $legacy_download_link"
+- fi
+- repeatable_command="./$script_name --version "\""$specific_version"\"" --install-dir "\""$install_root"\"" --architecture "\""$normalized_architecture"\"""
+- if [[ "$runtime" == "dotnet" ]]; then
+- repeatable_command+=" --runtime "\""dotnet"\"""
+- elif [[ "$runtime" == "aspnetcore" ]]; then
+- repeatable_command+=" --runtime "\""aspnetcore"\"""
+- fi
+- repeatable_command+="$non_dynamic_parameters"
+- say "Repeatable invocation: $repeatable_command"
+- exit 0
+-fi
+-
+-check_pre_reqs
+-install_dotnet
+-
+-bin_path="$(get_absolute_path "$(combine_paths "$install_root" "$bin_folder_relative_path")")"
+-if [ "$no_path" = false ]; then
+- say "Adding to current process PATH: \`$bin_path\`. Note: This change will be visible only when sourcing script."
+- export PATH="$bin_path":"$PATH"
+-else
+- say "Binaries of dotnet can be found in $bin_path"
+-fi
+-
+-say "Installation finished successfully."
+diff --git a/eng/common/dotnet-install.sh b/eng/common/dotnet-install.sh
+index ead6a1d..fdfeea6 100755
+--- a/eng/common/dotnet-install.sh
++++ b/eng/common/dotnet-install.sh
+@@ -19,7 +19,7 @@ runtime='dotnet'
+ runtimeSourceFeed=''
+ runtimeSourceFeedKey=''
+ while [[ $# > 0 ]]; do
+- opt="$(echo "$1" | awk '{print tolower($0)}')"
++ opt="$(echo "$1" | tr "[:upper:]" "[:lower:]")"
+ case "$opt" in
+ -version|-v)
+ shift
+@@ -49,13 +49,8 @@ while [[ $# > 0 ]]; do
+ shift
+ done
+
+-# Use uname to determine what the CPU is.
+-cpuname=$(uname -p)
+-# Some Linux platforms report unknown for platform, but the arch for machine.
+-if [[ "$cpuname" == "unknown" ]]; then
+- cpuname=$(uname -m)
+-fi
+-
++# Use uname to determine what the CPU is, see https://en.wikipedia.org/wiki/Uname#Examples
++cpuname=$(uname -m)
+ case $cpuname in
+ aarch64)
+ buildarch=arm64
+@@ -75,7 +70,7 @@ case $cpuname in
+ ;;
+ esac
+
+-dotnetRoot="$repo_root/.dotnet"
++dotnetRoot="${repo_root}.dotnet"
+ if [[ $architecture != "" ]] && [[ $architecture != $buildarch ]]; then
+ dotnetRoot="$dotnetRoot/$architecture"
+ fi
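
The hunk above simplifies CPU detection to a single uname -m call mapped onto build architectures; only the aarch64 -> arm64 arm of the case statement is visible here. A rough sketch of that path (the x64 fallback is an assumption of this sketch, not something shown in the hunk):

    cpuname=$(uname -m)           # e.g. "aarch64" on a 64-bit ARM host, "x86_64" on x64
    case $cpuname in
      aarch64) buildarch=arm64 ;;
      *)       buildarch=x64 ;;   # assumed fallback; the real script maps more machine names
    esac
    echo "buildarch=$buildarch"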
+diff --git a/eng/common/generate-locproject.ps1 b/eng/common/generate-locproject.ps1
+new file mode 100644
+index 0000000..25e97ac
+--- /dev/null
++++ b/eng/common/generate-locproject.ps1
+@@ -0,0 +1,117 @@
++Param(
++ [Parameter(Mandatory=$true)][string] $SourcesDirectory, # Directory where source files live; if using a Localize directory it should live in here
++ [string] $LanguageSet = 'VS_Main_Languages', # Language set to be used in the LocProject.json
++ [switch] $UseCheckedInLocProjectJson, # When set, generates a LocProject.json and compares it to one that already exists in the repo; otherwise just generates one
++ [switch] $CreateNeutralXlfs # Creates neutral xlf files. Only set to false when running locally
++)
++
++# Generates LocProject.json files for the OneLocBuild task. OneLocBuildTask is described here:
++# https://ceapex.visualstudio.com/CEINTL/_wiki/wikis/CEINTL.wiki/107/Localization-with-OneLocBuild-Task
++
++Set-StrictMode -Version 2.0
++$ErrorActionPreference = "Stop"
++. $PSScriptRoot\tools.ps1
++
++Import-Module -Name (Join-Path $PSScriptRoot 'native\CommonLibrary.psm1')
++
++$exclusionsFilePath = "$SourcesDirectory\eng\Localize\LocExclusions.json"
++$exclusions = @{ Exclusions = @() }
++if (Test-Path -Path $exclusionsFilePath)
++{
++ $exclusions = Get-Content "$exclusionsFilePath" | ConvertFrom-Json
++}
++
++Push-Location "$SourcesDirectory" # push location for Resolve-Path -Relative to work
++
++# Template files
++$jsonFiles = @()
++$jsonTemplateFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "\.template\.config\\localize\\.+\.en\.json" } # .NET templating pattern
++$jsonTemplateFiles | ForEach-Object {
++ $null = $_.Name -Match "(.+)\.[\w-]+\.json" # matches '[filename].[langcode].json
++
++ $destinationFile = "$($_.Directory.FullName)\$($Matches.1).json"
++ $jsonFiles += Copy-Item "$($_.FullName)" -Destination $destinationFile -PassThru
++}
++
++$jsonWinformsTemplateFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "en\\strings\.json" } # current winforms pattern
++
++$xlfFiles = @()
++
++$allXlfFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory\*\*.xlf"
++$langXlfFiles = @()
++if ($allXlfFiles) {
++ $null = $allXlfFiles[0].FullName -Match "\.([\w-]+)\.xlf" # matches '[langcode].xlf'
++ $firstLangCode = $Matches.1
++ $langXlfFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory\*\*.$firstLangCode.xlf"
++}
++$langXlfFiles | ForEach-Object {
++ $null = $_.Name -Match "(.+)\.[\w-]+\.xlf" # matches '[filename].[langcode].xlf
++
++ $destinationFile = "$($_.Directory.FullName)\$($Matches.1).xlf"
++ $xlfFiles += Copy-Item "$($_.FullName)" -Destination $destinationFile -PassThru
++}
++
++$locFiles = $jsonFiles + $jsonWinformsTemplateFiles + $xlfFiles
++
++$locJson = @{
++ Projects = @(
++ @{
++ LanguageSet = $LanguageSet
++ LocItems = @(
++ $locFiles | ForEach-Object {
++ $outputPath = "$(($_.DirectoryName | Resolve-Path -Relative) + "\")"
++ $continue = $true
++ foreach ($exclusion in $exclusions.Exclusions) {
++ if ($outputPath.Contains($exclusion))
++ {
++ $continue = $false
++ }
++ }
++ $sourceFile = ($_.FullName | Resolve-Path -Relative)
++ if (!$CreateNeutralXlfs -and $_.Extension -eq '.xlf') {
++ Remove-Item -Path $sourceFile
++ }
++ if ($continue)
++ {
++ if ($_.Directory.Name -eq 'en' -and $_.Extension -eq '.json') {
++ return @{
++ SourceFile = $sourceFile
++ CopyOption = "LangIDOnPath"
++ OutputPath = "$($_.Directory.Parent.FullName | Resolve-Path -Relative)\"
++ }
++ }
++ else {
++ return @{
++ SourceFile = $sourceFile
++ CopyOption = "LangIDOnName"
++ OutputPath = $outputPath
++ }
++ }
++ }
++ }
++ )
++ }
++ )
++}
++
++$json = ConvertTo-Json $locJson -Depth 5
++Write-Host "LocProject.json generated:`n`n$json`n`n"
++Pop-Location
++
++if (!$UseCheckedInLocProjectJson) {
++ New-Item "$SourcesDirectory\eng\Localize\LocProject.json" -Force # Need this to make sure the Localize directory is created
++ Set-Content "$SourcesDirectory\eng\Localize\LocProject.json" $json
++}
++else {
++ New-Item "$SourcesDirectory\eng\Localize\LocProject-generated.json" -Force # Need this to make sure the Localize directory is created
++ Set-Content "$SourcesDirectory\eng\Localize\LocProject-generated.json" $json
++
++ if ((Get-FileHash "$SourcesDirectory\eng\Localize\LocProject-generated.json").Hash -ne (Get-FileHash "$SourcesDirectory\eng\Localize\LocProject.json").Hash) {
++ Write-PipelineTelemetryError -Category "OneLocBuild" -Message "Existing LocProject.json differs from generated LocProject.json. Download LocProject-generated.json and compare them."
++
++ exit 1
++ }
++ else {
++ Write-Host "Generated LocProject.json and current LocProject.json are identical."
++ }
++}
+\ No newline at end of file
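
For orientation, the LocProject.json that generate-locproject.ps1 writes has roughly the shape below; the property names (Projects, LanguageSet, LocItems, SourceFile, CopyOption, OutputPath) come from the script above, while the paths are made-up examples:

    {
      "Projects": [
        {
          "LanguageSet": "VS_Main_Languages",
          "LocItems": [
            {
              "SourceFile": ".\\src\\SomeProject\\xlf\\Strings.xlf",
              "CopyOption": "LangIDOnName",
              "OutputPath": ".\\src\\SomeProject\\xlf\\"
            }
          ]
        }
      ]
    }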
+diff --git a/eng/common/init-tools-native.sh b/eng/common/init-tools-native.sh
+old mode 100644
+new mode 100755
+index 29fc5db..5bd205b
+--- a/eng/common/init-tools-native.sh
++++ b/eng/common/init-tools-native.sh
+@@ -16,7 +16,7 @@ declare -A native_assets
+ . $scriptroot/native/common-library.sh
+
+ while (($# > 0)); do
+- lowerI="$(echo $1 | awk '{print tolower($0)}')"
++ lowerI="$(echo $1 | tr "[:upper:]" "[:lower:]")"
+ case $lowerI in
+ --baseuri)
+ base_uri=$2
+@@ -76,24 +76,89 @@ while (($# > 0)); do
+ done
+
+ function ReadGlobalJsonNativeTools {
+- # Get the native-tools section from the global.json.
+- local native_tools_section=$(cat $global_json_file | awk '/"native-tools"/,/}/')
+- # Only extract the contents of the object.
+- local native_tools_list=$(echo $native_tools_section | awk -F"[{}]" '{print $2}')
+- native_tools_list=${native_tools_list//[\" ]/}
+- native_tools_list=$( echo "$native_tools_list" | sed 's/\s//g' | sed 's/,/\n/g' )
+-
+- local old_IFS=$IFS
+- while read -r line; do
+- # Lines are of the form: 'tool:version'
+- IFS=:
+- while read -r key value; do
+- native_assets[$key]=$value
+- done <<< "$line"
+- done <<< "$native_tools_list"
+- IFS=$old_IFS
+-
+- return 0;
++ # happy path: we have a proper JSON parsing tool `jq(1)` in PATH!
++ if command -v jq &> /dev/null; then
++
++ # jq: read each key/value pair under "native-tools" entry and emit:
++ # KEY="" VALUE=""
++ # followed by a null byte.
++ #
++ # bash: read line with null byte delimiter and push to array (for later `eval`uation).
++
++ while IFS= read -rd '' line; do
++ native_assets+=("$line")
++ done < <(jq -r '. |
++ select(has("native-tools")) |
++ ."native-tools" |
++ keys[] as $k |
++ @sh "KEY=\($k) VALUE=\(.[$k])\u0000"' "$global_json_file")
++
++ return
++ fi
++
++ # Warning: falling back to manually parsing JSON, which is not recommended.
++
++ # Following routine matches the output and escaping logic of jq(1)'s @sh formatter used above.
++ # It has been tested with several weird strings with escaped characters in entries (key and value)
++ # and results were compared with the output of jq(1) in binary representation using xxd(1);
++ # just before the assignment to 'native_assets' array (above and below).
++
++ # try to capture the section under "native-tools".
++ if [[ ! "$(cat "$global_json_file")" =~ \"native-tools\"[[:space:]\:\{]*([^\}]+) ]]; then
++ return
++ fi
++
++ section="${BASH_REMATCH[1]}"
++
++ parseStarted=0
++ possibleEnd=0
++ escaping=0
++ escaped=0
++ isKey=1
++
++ for (( i=0; i<${#section}; i++ )); do
++ char="${section:$i:1}"
++ if ! ((parseStarted)) && [[ "$char" =~ [[:space:],:] ]]; then continue; fi
++
++ if ! ((escaping)) && [[ "$char" == "\\" ]]; then
++ escaping=1
++ elif ((escaping)) && ! ((escaped)); then
++ escaped=1
++ fi
++
++ if ! ((parseStarted)) && [[ "$char" == "\"" ]]; then
++ parseStarted=1
++ possibleEnd=0
++ elif [[ "$char" == "'" ]]; then
++ token="$token'\\\''"
++ possibleEnd=0
++ elif ((escaping)) || [[ "$char" != "\"" ]]; then
++ token="$token$char"
++ possibleEnd=1
++ fi
++
++ if ((possibleEnd)) && ! ((escaping)) && [[ "$char" == "\"" ]]; then
++ # Use printf to unescape token to match jq(1)'s @sh formatting rules.
++ # do not use 'token="$(printf "$token")"' syntax, as $() eats the trailing linefeed.
++ printf -v token "'$token'"
++
++ if ((isKey)); then
++ KEY="$token"
++ isKey=0
++ else
++ line="KEY=$KEY VALUE=$token"
++ native_assets+=("$line")
++ isKey=1
++ fi
++
++ # reset for next token
++ parseStarted=0
++ token=
++ elif ((escaping)) && ((escaped)); then
++ escaping=0
++ escaped=0
++ fi
++ done
+ }
+
+ native_base_dir=$install_directory
+@@ -111,14 +176,14 @@ if [[ ${#native_assets[@]} -eq 0 ]]; then
+ exit 0;
+ else
+ native_installer_dir="$scriptroot/native"
+- for tool in "${!native_assets[@]}"
+- do
+- tool_version=${native_assets[$tool]}
+- installer_path="$native_installer_dir/install-$tool.sh"
++ for index in "${!native_assets[@]}"; do
++ eval "${native_assets["$index"]}"
++
++ installer_path="$native_installer_dir/install-$KEY.sh"
+ installer_command="$installer_path"
+ installer_command+=" --baseuri $base_uri"
+ installer_command+=" --installpath $install_bin"
+- installer_command+=" --version $tool_version"
++ installer_command+=" --version $VALUE"
+ echo $installer_command
+
+ if [[ $force = true ]]; then
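
To make the jq branch above concrete, here is a sketch that runs the same filter against a hypothetical global.json fragment (the tool name and version are illustrative, not values from this repo):

    echo '{ "native-tools": { "cmake": "3.16.4" } }' > /tmp/global.json
    jq -r '. | select(has("native-tools")) | ."native-tools" | keys[] as $k |
           @sh "KEY=\($k) VALUE=\(.[$k])\u0000"' /tmp/global.json | tr -d '\0'
    # prints: KEY='cmake' VALUE='3.16.4'
    # the install loop then eval's each entry and invokes: install-cmake.sh ... --version 3.16.4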
+diff --git a/eng/common/internal-feed-operations.ps1 b/eng/common/internal-feed-operations.ps1
+index b8f6529..92b7734 100644
+--- a/eng/common/internal-feed-operations.ps1
++++ b/eng/common/internal-feed-operations.ps1
+@@ -45,11 +45,11 @@ function SetupCredProvider {
+ # Then, we set the 'VSS_NUGET_EXTERNAL_FEED_ENDPOINTS' environment variable to restore from the stable
+ # feeds successfully
+
+- $nugetConfigPath = "$RepoRoot\NuGet.config"
++ $nugetConfigPath = Join-Path $RepoRoot "NuGet.config"
+
+ if (-Not (Test-Path -Path $nugetConfigPath)) {
+ Write-PipelineTelemetryError -Category 'Build' -Message 'NuGet.config file not found in repo root!'
+- ExitWithExitCode 1
++ ExitWithExitCode 1
+ }
+
+ $endpoints = New-Object System.Collections.ArrayList
+@@ -63,8 +63,6 @@ function SetupCredProvider {
+ }
+
+ if (($endpoints | Measure-Object).Count -gt 0) {
+- # [SuppressMessage("Microsoft.Security", "CS002:SecretInNextLine", Justification="Endpoint code example with no real credentials.")]
+- # Create the JSON object. It should look like '{"endpointCredentials": [{"endpoint":"http://example.index.json", "username":"optional", "password":"accesstoken"}]}'
+ $endpointCredentials = @{endpointCredentials=$endpoints} | ConvertTo-Json -Compress
+
+ # Create the environment variables the AzDo way
+@@ -87,7 +85,7 @@ function SetupCredProvider {
+
+ #Workaround for https://github.com/microsoft/msbuild/issues/4430
+ function InstallDotNetSdkAndRestoreArcade {
+- $dotnetTempDir = "$RepoRoot\dotnet"
++ $dotnetTempDir = Join-Path $RepoRoot "dotnet"
+ $dotnetSdkVersion="2.1.507" # After experimentation we know this version works when restoring the SDK (compared to 3.0.*)
+ $dotnet = "$dotnetTempDir\dotnet.exe"
+ $restoreProjPath = "$PSScriptRoot\restore.proj"
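
Both SetupCredProvider implementations hand the credentials to the NuGet credential provider through the VSS_NUGET_EXTERNAL_FEED_ENDPOINTS environment variable. The JSON shape, taken from the comment these hunks remove (values are placeholders), looks like this when set from a shell:

    export VSS_NUGET_EXTERNAL_FEED_ENDPOINTS='{"endpointCredentials":[{"endpoint":"http://example.index.json","username":"optional","password":"accesstoken"}]}'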
+diff --git a/eng/common/internal-feed-operations.sh b/eng/common/internal-feed-operations.sh
+index 9ed225e..9378223 100755
+--- a/eng/common/internal-feed-operations.sh
++++ b/eng/common/internal-feed-operations.sh
+@@ -39,7 +39,7 @@ function SetupCredProvider {
+ # Then, we set the 'VSS_NUGET_EXTERNAL_FEED_ENDPOINTS' environment variable to restore from the stable
+ # feeds successfully
+
+- local nugetConfigPath="$repo_root/NuGet.config"
++ local nugetConfigPath="{$repo_root}NuGet.config"
+
+ if [ ! "$nugetConfigPath" ]; then
+ Write-PipelineTelemetryError -category 'Build' "NuGet.config file not found in repo's root!"
+@@ -62,8 +62,6 @@ function SetupCredProvider {
+ endpoints+=']'
+
+ if [ ${#endpoints} -gt 2 ]; then
+- # [SuppressMessage("Microsoft.Security", "CS002:SecretInNextLine", Justification="Endpoint code example with no real credentials.")]
+- # Create the JSON object. It should look like '{"endpointCredentials": [{"endpoint":"http://example.index.json", "username":"optional", "password":"accesstoken"}]}'
+ local endpointCredentials="{\"endpointCredentials\": "$endpoints"}"
+
+ echo "##vso[task.setvariable variable=VSS_NUGET_EXTERNAL_FEED_ENDPOINTS]$endpointCredentials"
+@@ -103,7 +101,7 @@ authToken=''
+ repoName=''
+
+ while [[ $# > 0 ]]; do
+- opt="$(echo "$1" | awk '{print tolower($0)}')"
++ opt="$(echo "$1" | tr "[:upper:]" "[:lower:]")"
+ case "$opt" in
+ --operation)
+ operation=$2
+diff --git a/eng/common/msbuild.ps1 b/eng/common/msbuild.ps1
+index c640123..eea19cd 100644
+--- a/eng/common/msbuild.ps1
++++ b/eng/common/msbuild.ps1
+@@ -5,6 +5,7 @@ Param(
+ [bool] $nodeReuse = $true,
+ [switch] $ci,
+ [switch] $prepareMachine,
++ [switch] $excludePrereleaseVS,
+ [Parameter(ValueFromRemainingArguments=$true)][String[]]$extraArgs
+ )
+
+diff --git a/eng/common/msbuild.sh b/eng/common/msbuild.sh
+index 8160cd5..20d3dad 100755
+--- a/eng/common/msbuild.sh
++++ b/eng/common/msbuild.sh
+@@ -19,7 +19,7 @@ prepare_machine=false
+ extra_args=''
+
+ while (($# > 0)); do
+- lowerI="$(echo $1 | awk '{print tolower($0)}')"
++ lowerI="$(echo $1 | tr "[:upper:]" "[:lower:]")"
+ case $lowerI in
+ --verbosity)
+ verbosity=$2
+diff --git a/eng/common/native/CommonLibrary.psm1 b/eng/common/native/CommonLibrary.psm1
+index d7d1a65..adf707c 100644
+--- a/eng/common/native/CommonLibrary.psm1
++++ b/eng/common/native/CommonLibrary.psm1
+@@ -48,7 +48,7 @@ function DownloadAndExtract {
+ -Verbose:$Verbose
+
+ if ($DownloadStatus -Eq $False) {
+- Write-Error "Download failed"
++ Write-Error "Download failed from $Uri"
+ return $False
+ }
+
+diff --git a/eng/common/native/common-library.sh b/eng/common/native/common-library.sh
+old mode 100644
+new mode 100755
+diff --git a/eng/common/native/install-cmake-test.sh b/eng/common/native/install-cmake-test.sh
+index 12339a4..8a5e7cf 100755
+--- a/eng/common/native/install-cmake-test.sh
++++ b/eng/common/native/install-cmake-test.sh
+@@ -14,7 +14,7 @@ download_retries=5
+ retry_wait_time_seconds=30
+
+ while (($# > 0)); do
+- lowerI="$(echo $1 | awk '{print tolower($0)}')"
++ lowerI="$(echo $1 | tr "[:upper:]" "[:lower:]")"
+ case $lowerI in
+ --baseuri)
+ base_uri=$2
+@@ -63,7 +63,7 @@ done
+
+ tool_name="cmake-test"
+ tool_os=$(GetCurrentOS)
+-tool_folder=$(echo $tool_os | awk '{print tolower($0)}')
++tool_folder="$(echo $tool_os | tr "[:upper:]" "[:lower:]")"
+ tool_arch="x86_64"
+ tool_name_moniker="$tool_name-$version-$tool_os-$tool_arch"
+ tool_install_directory="$install_path/$tool_name/$version"
+@@ -114,4 +114,4 @@ if [[ $? != 0 ]]; then
+ exit 1
+ fi
+
+-exit 0
+\ No newline at end of file
++exit 0
+diff --git a/eng/common/native/install-cmake.sh b/eng/common/native/install-cmake.sh
+old mode 100644
+new mode 100755
+index 18041be..de496be
+--- a/eng/common/native/install-cmake.sh
++++ b/eng/common/native/install-cmake.sh
+@@ -14,7 +14,7 @@ download_retries=5
+ retry_wait_time_seconds=30
+
+ while (($# > 0)); do
+- lowerI="$(echo $1 | awk '{print tolower($0)}')"
++ lowerI="$(echo $1 | tr "[:upper:]" "[:lower:]")"
+ case $lowerI in
+ --baseuri)
+ base_uri=$2
+@@ -63,7 +63,7 @@ done
+
+ tool_name="cmake"
+ tool_os=$(GetCurrentOS)
+-tool_folder=$(echo $tool_os | awk '{print tolower($0)}')
++tool_folder="$(echo $tool_os | tr "[:upper:]" "[:lower:]")"
+ tool_arch="x86_64"
+ tool_name_moniker="$tool_name-$version-$tool_os-$tool_arch"
+ tool_install_directory="$install_path/$tool_name/$version"
+@@ -114,4 +114,4 @@ if [[ $? != 0 ]]; then
+ exit 1
+ fi
+
+-exit 0
+\ No newline at end of file
++exit 0
+diff --git a/eng/common/native/install-tool.ps1 b/eng/common/native/install-tool.ps1
+index f397e1c..78f2d84 100644
+--- a/eng/common/native/install-tool.ps1
++++ b/eng/common/native/install-tool.ps1
+@@ -105,7 +105,7 @@ try {
+ Write-Error "There are multiple copies of $ToolName in $($ToolInstallDirectory): `n$(@($ToolFilePath | out-string))"
+ exit 1
+ } elseif (@($ToolFilePath).Length -Lt 1) {
+- Write-Host "$ToolName was not found in $ToolFilePath."
++ Write-Host "$ToolName was not found in $ToolInstallDirectory."
+ exit 1
+ }
+
+diff --git a/eng/common/performance/blazor_perf.proj b/eng/common/performance/blazor_perf.proj
+deleted file mode 100644
+index 3b25359..0000000
+--- a/eng/common/performance/blazor_perf.proj
++++ /dev/null
+@@ -1,30 +0,0 @@
+-
+-
+- python3
+- $(HelixPreCommands);chmod +x $HELIX_WORKITEM_PAYLOAD/SOD/SizeOnDisk
+-
+-
+-
+-
+- %(Identity)
+-
+-
+-
+-
+- %HELIX_CORRELATION_PAYLOAD%\performance\src\scenarios\
+- $(ScenarioDirectory)blazor\
+-
+-
+- $HELIX_CORRELATION_PAYLOAD/performance/src/scenarios/
+- $(ScenarioDirectory)blazor/
+-
+-
+-
+-
+- $(WorkItemDirectory)
+- cd $(BlazorDirectory);$(Python) pre.py publish --msbuild %27/p:_TrimmerDumpDependencies=true%27 --msbuild-static AdditionalMonoLinkerOptions=%27"%24(AdditionalMonoLinkerOptions) --dump-dependencies"%27 --binlog %27./traces/blazor_publish.binlog%27
+- $(Python) test.py sod --scenario-name "%(Identity)"
+- $(Python) post.py
+-
+-
+-
+\ No newline at end of file
+diff --git a/eng/common/performance/crossgen_perf.proj b/eng/common/performance/crossgen_perf.proj
+deleted file mode 100644
+index 4264920..0000000
+--- a/eng/common/performance/crossgen_perf.proj
++++ /dev/null
+@@ -1,69 +0,0 @@
+-
+-
+-
+-
+- %(Identity)
+-
+-
+-
+-
+-
+- py -3
+- $(HelixPreCommands)
+- %HELIX_CORRELATION_PAYLOAD%\Core_Root
+- %HELIX_CORRELATION_PAYLOAD%\performance\src\scenarios\
+- $(ScenarioDirectory)crossgen\
+- $(ScenarioDirectory)crossgen2\
+-
+-
+- python3
+- $(HelixPreCommands);chmod +x $HELIX_WORKITEM_PAYLOAD/startup/Startup;chmod +x $HELIX_WORKITEM_PAYLOAD/startup/perfcollect;sudo apt update
+- $HELIX_CORRELATION_PAYLOAD/Core_Root
+- $HELIX_CORRELATION_PAYLOAD/performance/src/scenarios/
+- $(ScenarioDirectory)crossgen/
+- $(ScenarioDirectory)crossgen2/
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+- $(WorkItemDirectory)
+- $(Python) $(CrossgenDirectory)test.py crossgen --core-root $(CoreRoot) --test-name %(Identity)
+-
+-
+-
+-
+-
+- $(WorkItemDirectory)
+- $(Python) $(Crossgen2Directory)test.py crossgen2 --core-root $(CoreRoot) --single %(Identity)
+-
+-
+-
+-
+-
+-
+- 4:00
+-
+-
+-
+- 4:00
+-
+-
+- $(WorkItemDirectory)
+- $(Python) $(Crossgen2Directory)test.py crossgen2 --core-root $(CoreRoot) --composite $(Crossgen2Directory)framework-r2r.dll.rsp
+- 1:00
+-
+-
+-
+\ No newline at end of file
+diff --git a/eng/common/performance/microbenchmarks.proj b/eng/common/performance/microbenchmarks.proj
+deleted file mode 100644
+index 94b6efb..0000000
+--- a/eng/common/performance/microbenchmarks.proj
++++ /dev/null
+@@ -1,144 +0,0 @@
+-
+-
+-
+- %HELIX_CORRELATION_PAYLOAD%\performance\scripts\benchmarks_ci.py --csproj %HELIX_CORRELATION_PAYLOAD%\performance\$(TargetCsproj)
+- --dotnet-versions %DOTNET_VERSION% --cli-source-info args --cli-branch %PERFLAB_BRANCH% --cli-commit-sha %PERFLAB_HASH% --cli-repository https://github.com/%PERFLAB_REPO% --cli-source-timestamp %PERFLAB_BUILDTIMESTAMP%
+- py -3
+- %HELIX_CORRELATION_PAYLOAD%\Core_Root\CoreRun.exe
+- %HELIX_CORRELATION_PAYLOAD%\Baseline_Core_Root\CoreRun.exe
+-
+- $(HelixPreCommands);call %HELIX_CORRELATION_PAYLOAD%\performance\tools\machine-setup.cmd;set PYTHONPATH=%HELIX_WORKITEM_PAYLOAD%\scripts%3B%HELIX_WORKITEM_PAYLOAD%
+- %HELIX_CORRELATION_PAYLOAD%\artifacts\BenchmarkDotNet.Artifacts
+- %HELIX_CORRELATION_PAYLOAD%\artifacts\BenchmarkDotNet.Artifacts_Baseline
+- %HELIX_CORRELATION_PAYLOAD%\performance\src\tools\ResultsComparer\ResultsComparer.csproj
+- %HELIX_CORRELATION_PAYLOAD%\performance\tools\dotnet\$(Architecture)\dotnet.exe
+- %25%25
+- %HELIX_WORKITEM_ROOT%\testResults.xml
+-
+-
+-
+- $HELIX_CORRELATION_PAYLOAD
+- $(BaseDirectory)/performance
+-
+-
+-
+- $HELIX_WORKITEM_PAYLOAD
+- $(BaseDirectory)
+-
+-
+-
+- $(PerformanceDirectory)/scripts/benchmarks_ci.py --csproj $(PerformanceDirectory)/$(TargetCsproj)
+- --dotnet-versions $DOTNET_VERSION --cli-source-info args --cli-branch $PERFLAB_BRANCH --cli-commit-sha $PERFLAB_HASH --cli-repository https://github.com/$PERFLAB_REPO --cli-source-timestamp $PERFLAB_BUILDTIMESTAMP
+- python3
+- $(BaseDirectory)/Core_Root/corerun
+- $(BaseDirectory)/Baseline_Core_Root/corerun
+- $(HelixPreCommands);chmod +x $(PerformanceDirectory)/tools/machine-setup.sh;. $(PerformanceDirectory)/tools/machine-setup.sh
+- $(BaseDirectory)/artifacts/BenchmarkDotNet.Artifacts
+- $(BaseDirectory)/artifacts/BenchmarkDotNet.Artifacts_Baseline
+- $(PerformanceDirectory)/src/tools/ResultsComparer/ResultsComparer.csproj
+- $(PerformanceDirectory)/tools/dotnet/$(Architecture)/dotnet
+- %25
+- $HELIX_WORKITEM_ROOT/testResults.xml
+-
+-
+-
+- $(CliArguments) --wasm
+-
+-
+-
+- --corerun %HELIX_CORRELATION_PAYLOAD%\dotnet-mono\shared\Microsoft.NETCore.App\6.0.0\corerun.exe
+-
+-
+- --corerun $(BaseDirectory)/dotnet-mono/shared/Microsoft.NETCore.App/6.0.0/corerun
+-
+-
+-
+- --corerun $(CoreRun)
+-
+-
+-
+- --corerun $(BaselineCoreRun)
+-
+-
+-
+- $(Python) $(WorkItemCommand) --incremental no --architecture $(Architecture) -f $(_Framework) $(PerfLabArguments)
+-
+-
+-
+- $(WorkItemCommand) $(CliArguments)
+-
+-
+-
+- 2:30
+- 0:15
+-
+-
+-
+-
+- %(Identity)
+-
+-
+-
+-
+- 30
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+-
+- false
+-
+-
+-
+-
+-
+- $(WorkItemDirectory)
+- $(WorkItemCommand) --bdn-artifacts $(BaselineArtifactsDirectory) --bdn-arguments="--anyCategories $(BDNCategories) $(ExtraBenchmarkDotNetArguments) $(BaselineCoreRunArgument) --partition-count $(PartitionCount) --partition-index %(HelixWorkItem.Index)"
+- $(WorkItemCommand) --bdn-artifacts $(ArtifactsDirectory) --bdn-arguments="--anyCategories $(BDNCategories) $(ExtraBenchmarkDotNetArguments) $(CoreRunArgument) --partition-count $(PartitionCount) --partition-index %(HelixWorkItem.Index)"
+- $(DotnetExe) run -f $(_Framework) -p $(ResultsComparer) --base $(BaselineArtifactsDirectory) --diff $(ArtifactsDirectory) --threshold 2$(Percent) --xml $(XMLResults);$(FinalCommand)
+- $(WorkItemTimeout)
+-
+-
+-
+-
+-
+- $(WorkItemDirectory)
+- $(WorkItemCommand) --bdn-artifacts $(BaselineArtifactsDirectory) --bdn-arguments="--anyCategories $(BDNCategories) $(ExtraBenchmarkDotNetArguments) $(BaselineCoreRunArgument)"
+- $(WorkItemCommand) --bdn-artifacts $(ArtifactsDirectory) --bdn-arguments="--anyCategories $(BDNCategories) $(ExtraBenchmarkDotNetArguments) $(CoreRunArgument)"
+- $(DotnetExe) run -f $(_Framework) -p $(ResultsComparer) --base $(BaselineArtifactsDirectory) --diff $(ArtifactsDirectory) --threshold 2$(Percent) --xml $(XMLResults)
+- 4:00
+-
+-
+-
+\ No newline at end of file
+diff --git a/eng/common/performance/performance-setup.ps1 b/eng/common/performance/performance-setup.ps1
+deleted file mode 100644
+index 656c0bd..0000000
+--- a/eng/common/performance/performance-setup.ps1
++++ /dev/null
+@@ -1,147 +0,0 @@
+-Param(
+- [string] $SourceDirectory=$env:BUILD_SOURCESDIRECTORY,
+- [string] $CoreRootDirectory,
+- [string] $BaselineCoreRootDirectory,
+- [string] $Architecture="x64",
+- [string] $Framework="net5.0",
+- [string] $CompilationMode="Tiered",
+- [string] $Repository=$env:BUILD_REPOSITORY_NAME,
+- [string] $Branch=$env:BUILD_SOURCEBRANCH,
+- [string] $CommitSha=$env:BUILD_SOURCEVERSION,
+- [string] $BuildNumber=$env:BUILD_BUILDNUMBER,
+- [string] $RunCategories="Libraries Runtime",
+- [string] $Csproj="src\benchmarks\micro\MicroBenchmarks.csproj",
+- [string] $Kind="micro",
+- [switch] $LLVM,
+- [switch] $MonoInterpreter,
+- [switch] $MonoAOT,
+- [switch] $Internal,
+- [switch] $Compare,
+- [string] $MonoDotnet="",
+- [string] $Configurations="CompilationMode=$CompilationMode RunKind=$Kind"
+-)
+-
+-$RunFromPerformanceRepo = ($Repository -eq "dotnet/performance") -or ($Repository -eq "dotnet-performance")
+-$UseCoreRun = ($CoreRootDirectory -ne [string]::Empty)
+-$UseBaselineCoreRun = ($BaselineCoreRootDirectory -ne [string]::Empty)
+-
+-$PayloadDirectory = (Join-Path $SourceDirectory "Payload")
+-$PerformanceDirectory = (Join-Path $PayloadDirectory "performance")
+-$WorkItemDirectory = (Join-Path $SourceDirectory "workitem")
+-$ExtraBenchmarkDotNetArguments = "--iterationCount 1 --warmupCount 0 --invocationCount 1 --unrollFactor 1 --strategy ColdStart --stopOnFirstError true"
+-$Creator = $env:BUILD_DEFINITIONNAME
+-$PerfLabArguments = ""
+-$HelixSourcePrefix = "pr"
+-
+-$Queue = "Windows.10.Amd64.ClientRS4.DevEx.15.8.Open"
+-
+-# TODO: Implement a better logic to determine if Framework is .NET Core or >= .NET 5.
+-if ($Framework.StartsWith("netcoreapp") -or ($Framework -eq "net5.0")) {
+- $Queue = "Windows.10.Amd64.ClientRS5.Open"
+-}
+-
+-if ($Compare) {
+- $Queue = "Windows.10.Amd64.19H1.Tiger.Perf.Open"
+- $PerfLabArguments = ""
+- $ExtraBenchmarkDotNetArguments = ""
+-}
+-
+-if ($Internal) {
+- $Queue = "Windows.10.Amd64.19H1.Tiger.Perf"
+- $PerfLabArguments = "--upload-to-perflab-container"
+- $ExtraBenchmarkDotNetArguments = ""
+- $Creator = ""
+- $HelixSourcePrefix = "official"
+-}
+-
+-if($MonoInterpreter)
+-{
+- $ExtraBenchmarkDotNetArguments = "--category-exclusion-filter NoInterpreter"
+-}
+-
+-if($MonoDotnet -ne "")
+-{
+- $Configurations += " LLVM=$LLVM MonoInterpreter=$MonoInterpreter MonoAOT=$MonoAOT"
+- if($ExtraBenchmarkDotNetArguments -eq "")
+- {
+- #FIX ME: We need to block these tests as they don't run on mono for now
+- $ExtraBenchmarkDotNetArguments = "--exclusion-filter *Perf_Image* *Perf_NamedPipeStream*"
+- }
+- else
+- {
+- #FIX ME: We need to block these tests as they don't run on mono for now
+- $ExtraBenchmarkDotNetArguments += " --exclusion-filter *Perf_Image* *Perf_NamedPipeStream*"
+- }
+-}
+-
+-# FIX ME: This is a workaround until we get this from the actual pipeline
+-$CommonSetupArguments="--channel master --queue $Queue --build-number $BuildNumber --build-configs $Configurations --architecture $Architecture"
+-$SetupArguments = "--repository https://github.com/$Repository --branch $Branch --get-perf-hash --commit-sha $CommitSha $CommonSetupArguments"
+-
+-
+-#This grabs the LKG version number of dotnet and passes it to our scripts
+-$VersionJSON = Get-Content global.json | ConvertFrom-Json
+-$DotNetVersion = $VersionJSON.tools.dotnet
+-$SetupArguments = "--dotnet-versions $DotNetVersion $SetupArguments"
+-
+-
+-if ($RunFromPerformanceRepo) {
+- $SetupArguments = "--perf-hash $CommitSha $CommonSetupArguments"
+-
+- robocopy $SourceDirectory $PerformanceDirectory /E /XD $PayloadDirectory $SourceDirectory\artifacts $SourceDirectory\.git
+-}
+-else {
+- git clone --branch master --depth 1 --quiet https://github.com/dotnet/performance $PerformanceDirectory
+-}
+-
+-if($MonoDotnet -ne "")
+-{
+- $UsingMono = "true"
+- $MonoDotnetPath = (Join-Path $PayloadDirectory "dotnet-mono")
+- Move-Item -Path $MonoDotnet -Destination $MonoDotnetPath
+-}
+-
+-if ($UseCoreRun) {
+- $NewCoreRoot = (Join-Path $PayloadDirectory "Core_Root")
+- Move-Item -Path $CoreRootDirectory -Destination $NewCoreRoot
+-}
+-if ($UseBaselineCoreRun) {
+- $NewBaselineCoreRoot = (Join-Path $PayloadDirectory "Baseline_Core_Root")
+- Move-Item -Path $BaselineCoreRootDirectory -Destination $NewBaselineCoreRoot
+-}
+-
+-$DocsDir = (Join-Path $PerformanceDirectory "docs")
+-robocopy $DocsDir $WorkItemDirectory
+-
+-# Set variables that we will need to have in future steps
+-$ci = $true
+-
+-. "$PSScriptRoot\..\pipeline-logging-functions.ps1"
+-
+-# Directories
+-Write-PipelineSetVariable -Name 'PayloadDirectory' -Value "$PayloadDirectory" -IsMultiJobVariable $false
+-Write-PipelineSetVariable -Name 'PerformanceDirectory' -Value "$PerformanceDirectory" -IsMultiJobVariable $false
+-Write-PipelineSetVariable -Name 'WorkItemDirectory' -Value "$WorkItemDirectory" -IsMultiJobVariable $false
+-
+-# Script Arguments
+-Write-PipelineSetVariable -Name 'Python' -Value "py -3" -IsMultiJobVariable $false
+-Write-PipelineSetVariable -Name 'ExtraBenchmarkDotNetArguments' -Value "$ExtraBenchmarkDotNetArguments" -IsMultiJobVariable $false
+-Write-PipelineSetVariable -Name 'SetupArguments' -Value "$SetupArguments" -IsMultiJobVariable $false
+-Write-PipelineSetVariable -Name 'PerfLabArguments' -Value "$PerfLabArguments" -IsMultiJobVariable $false
+-Write-PipelineSetVariable -Name 'BDNCategories' -Value "$RunCategories" -IsMultiJobVariable $false
+-Write-PipelineSetVariable -Name 'TargetCsproj' -Value "$Csproj" -IsMultiJobVariable $false
+-Write-PipelineSetVariable -Name 'Kind' -Value "$Kind" -IsMultiJobVariable $false
+-Write-PipelineSetVariable -Name 'Architecture' -Value "$Architecture" -IsMultiJobVariable $false
+-Write-PipelineSetVariable -Name 'UseCoreRun' -Value "$UseCoreRun" -IsMultiJobVariable $false
+-Write-PipelineSetVariable -Name 'UseBaselineCoreRun' -Value "$UseBaselineCoreRun" -IsMultiJobVariable $false
+-Write-PipelineSetVariable -Name 'RunFromPerfRepo' -Value "$RunFromPerformanceRepo" -IsMultiJobVariable $false
+-Write-PipelineSetVariable -Name 'Compare' -Value "$Compare" -IsMultiJobVariable $false
+-Write-PipelineSetVariable -Name 'MonoDotnet' -Value "$UsingMono" -IsMultiJobVariable $false
+-
+-# Helix Arguments
+-Write-PipelineSetVariable -Name 'Creator' -Value "$Creator" -IsMultiJobVariable $false
+-Write-PipelineSetVariable -Name 'Queue' -Value "$Queue" -IsMultiJobVariable $false
+-Write-PipelineSetVariable -Name 'HelixSourcePrefix' -Value "$HelixSourcePrefix" -IsMultiJobVariable $false
+-Write-PipelineSetVariable -Name '_BuildConfig' -Value "$Architecture.$Kind.$Framework" -IsMultiJobVariable $false
+-
+-exit 0
+\ No newline at end of file
+diff --git a/eng/common/performance/performance-setup.sh b/eng/common/performance/performance-setup.sh
+deleted file mode 100755
+index 806e56c..0000000
+--- a/eng/common/performance/performance-setup.sh
++++ /dev/null
+@@ -1,289 +0,0 @@
+-#!/usr/bin/env bash
+-
+-source_directory=$BUILD_SOURCESDIRECTORY
+-core_root_directory=
+-baseline_core_root_directory=
+-architecture=x64
+-framework=net5.0
+-compilation_mode=tiered
+-repository=$BUILD_REPOSITORY_NAME
+-branch=$BUILD_SOURCEBRANCH
+-commit_sha=$BUILD_SOURCEVERSION
+-build_number=$BUILD_BUILDNUMBER
+-internal=false
+-compare=false
+-mono_dotnet=
+-kind="micro"
+-llvm=false
+-monointerpreter=false
+-monoaot=false
+-run_categories="Libraries Runtime"
+-csproj="src\benchmarks\micro\MicroBenchmarks.csproj"
+-configurations="CompliationMode=$compilation_mode RunKind=$kind"
+-run_from_perf_repo=false
+-use_core_run=true
+-use_baseline_core_run=true
+-using_mono=false
+-wasm_runtime_loc=
+-using_wasm=false
+-use_latest_dotnet=false
+-
+-while (($# > 0)); do
+- lowerI="$(echo $1 | awk '{print tolower($0)}')"
+- case $lowerI in
+- --sourcedirectory)
+- source_directory=$2
+- shift 2
+- ;;
+- --corerootdirectory)
+- core_root_directory=$2
+- shift 2
+- ;;
+- --baselinecorerootdirectory)
+- baseline_core_root_directory=$2
+- shift 2
+- ;;
+- --architecture)
+- architecture=$2
+- shift 2
+- ;;
+- --framework)
+- framework=$2
+- shift 2
+- ;;
+- --compilationmode)
+- compilation_mode=$2
+- shift 2
+- ;;
+- --repository)
+- repository=$2
+- shift 2
+- ;;
+- --branch)
+- branch=$2
+- shift 2
+- ;;
+- --commitsha)
+- commit_sha=$2
+- shift 2
+- ;;
+- --buildnumber)
+- build_number=$2
+- shift 2
+- ;;
+- --kind)
+- kind=$2
+- configurations="CompilationMode=$compilation_mode RunKind=$kind"
+- shift 2
+- ;;
+- --runcategories)
+- run_categories=$2
+- shift 2
+- ;;
+- --csproj)
+- csproj=$2
+- shift 2
+- ;;
+- --internal)
+- internal=true
+- shift 1
+- ;;
+- --llvm)
+- llvm=true
+- shift 1
+- ;;
+- --monointerpreter)
+- monointerpreter=true
+- shift 1
+- ;;
+- --monoaot)
+- monoaot=true
+- shift 1
+- ;;
+- --monodotnet)
+- mono_dotnet=$2
+- shift 2
+- ;;
+- --wasm)
+- wasm_runtime_loc=$2
+- shift 2
+- ;;
+- --compare)
+- compare=true
+- shift 1
+- ;;
+- --configurations)
+- configurations=$2
+- shift 2
+- ;;
+- --latestdotnet)
+- use_latest_dotnet=true
+- shift 1
+- ;;
+- *)
+- echo "Common settings:"
+- echo " --corerootdirectory Directory where Core_Root exists, if running perf testing with --corerun"
+- echo " --architecture Architecture of the testing being run"
+- echo " --configurations List of key=value pairs that will be passed to perf testing infrastructure."
+- echo " ex: --configurations \"CompilationMode=Tiered OptimzationLevel=PGO\""
+- echo " --help Print help and exit"
+- echo ""
+- echo "Advanced settings:"
+- echo " --framework The framework to run, if not running in master"
+- echo " --compliationmode The compilation mode if not passing --configurations"
+- echo " --sourcedirectory The directory of the sources. Defaults to env:BUILD_SOURCESDIRECTORY"
+- echo " --repository The name of the repository in the / format. Defaults to env:BUILD_REPOSITORY_NAME"
+- echo " --branch The name of the branch. Defaults to env:BUILD_SOURCEBRANCH"
+- echo " --commitsha The commit sha1 to run against. Defaults to env:BUILD_SOURCEVERSION"
+- echo " --buildnumber The build number currently running. Defaults to env:BUILD_BUILDNUMBER"
+- echo " --csproj The relative path to the benchmark csproj whose tests should be run. Defaults to src\benchmarks\micro\MicroBenchmarks.csproj"
+- echo " --kind Related to csproj. The kind of benchmarks that should be run. Defaults to micro"
+- echo " --runcategories Related to csproj. Categories of benchmarks to run. Defaults to \"coreclr corefx\""
+- echo " --internal If the benchmarks are running as an official job."
+- echo " --monodotnet Pass the path to the mono dotnet for mono performance testing."
+- echo " --wasm Path to the unpacked wasm runtime pack."
+- echo " --latestdotnet --dotnet-versions will not be specified. --dotnet-versions defaults to LKG version in global.json "
+- echo ""
+- exit 0
+- ;;
+- esac
+-done
+-
+-if [ "$repository" == "dotnet/performance" ] || [ "$repository" == "dotnet-performance" ]; then
+- run_from_perf_repo=true
+-fi
+-
+-if [ -z "$configurations" ]; then
+- configurations="CompilationMode=$compilation_mode"
+-fi
+-
+-if [ -z "$core_root_directory" ]; then
+- use_core_run=false
+-fi
+-
+-if [ -z "$baseline_core_root_directory" ]; then
+- use_baseline_core_run=false
+-fi
+-
+-payload_directory=$source_directory/Payload
+-performance_directory=$payload_directory/performance
+-workitem_directory=$source_directory/workitem
+-extra_benchmark_dotnet_arguments="--iterationCount 1 --warmupCount 0 --invocationCount 1 --unrollFactor 1 --strategy ColdStart --stopOnFirstError true"
+-perflab_arguments=
+-queue=Ubuntu.1804.Amd64.Open
+-creator=$BUILD_DEFINITIONNAME
+-helix_source_prefix="pr"
+-
+-if [[ "$compare" == true ]]; then
+- extra_benchmark_dotnet_arguments=
+- perflab_arguments=
+-
+- # No open queues for arm64
+- if [[ "$architecture" = "arm64" ]]; then
+- echo "Compare not available for arm64"
+- exit 1
+- fi
+-
+- queue=Ubuntu.1804.Amd64.Tiger.Perf.Open
+-fi
+-
+-if [[ "$internal" == true ]]; then
+- perflab_arguments="--upload-to-perflab-container"
+- helix_source_prefix="official"
+- creator=
+- extra_benchmark_dotnet_arguments=
+-
+- if [[ "$architecture" = "arm64" ]]; then
+- queue=Ubuntu.1804.Arm64.Perf
+- else
+- queue=Ubuntu.1804.Amd64.Tiger.Perf
+- fi
+-fi
+-
+-if [[ "$mono_dotnet" != "" ]] && [[ "$monointerpreter" == "false" ]]; then
+- configurations="$configurations LLVM=$llvm MonoInterpreter=$monointerpreter MonoAOT=$monoaot"
+- extra_benchmark_dotnet_arguments="$extra_benchmark_dotnet_arguments --category-exclusion-filter NoMono"
+-fi
+-
+-if [[ "$wasm_runtime_loc" != "" ]]; then
+- configurations="CompilationMode=wasm RunKind=$kind"
+- extra_benchmark_dotnet_arguments="$extra_benchmark_dotnet_arguments --category-exclusion-filter NoInterpreter NoWASM NoMono"
+-fi
+-
+-if [[ "$mono_dotnet" != "" ]] && [[ "$monointerpreter" == "true" ]]; then
+- extra_benchmark_dotnet_arguments="$extra_benchmark_dotnet_arguments --category-exclusion-filter NoInterpreter NoMono"
+-fi
+-
+-common_setup_arguments="--channel master --queue $queue --build-number $build_number --build-configs $configurations --architecture $architecture"
+-setup_arguments="--repository https://github.com/$repository --branch $branch --get-perf-hash --commit-sha $commit_sha $common_setup_arguments"
+-
+-
+-if [[ "$use_latest_dotnet" = false ]]; then
+- # Get the tools section from the global.json.
+- # This grabs the LKG version number of dotnet and passes it to our scripts
+- dotnet_version=`cat global.json | python3 -c 'import json,sys;obj=json.load(sys.stdin);print(obj["tools"]["dotnet"])'`
+- setup_arguments="--dotnet-versions $dotnet_version $setup_arguments"
+-fi
+-
+-if [[ "$run_from_perf_repo" = true ]]; then
+- payload_directory=
+- workitem_directory=$source_directory
+- performance_directory=$workitem_directory
+- setup_arguments="--perf-hash $commit_sha $common_setup_arguments"
+-else
+- git clone --branch master --depth 1 --quiet https://github.com/dotnet/performance $performance_directory
+-
+- docs_directory=$performance_directory/docs
+- mv $docs_directory $workitem_directory
+-fi
+-
+-if [[ "$wasm_runtime_loc" != "" ]]; then
+- using_wasm=true
+- wasm_dotnet_path=$payload_directory/dotnet-wasm
+- mv $wasm_runtime_loc $wasm_dotnet_path
+- extra_benchmark_dotnet_arguments="$extra_benchmark_dotnet_arguments --wasmMainJS \$HELIX_CORRELATION_PAYLOAD/dotnet-wasm/runtime-test.js --wasmEngine /home/helixbot/.jsvu/v8 --customRuntimePack \$HELIX_CORRELATION_PAYLOAD/dotnet-wasm"
+-fi
+-
+-if [[ "$mono_dotnet" != "" ]]; then
+- using_mono=true
+- mono_dotnet_path=$payload_directory/dotnet-mono
+- mv $mono_dotnet $mono_dotnet_path
+-fi
+-
+-if [[ "$use_core_run" = true ]]; then
+- new_core_root=$payload_directory/Core_Root
+- mv $core_root_directory $new_core_root
+-fi
+-
+-if [[ "$use_baseline_core_run" = true ]]; then
+- new_baseline_core_root=$payload_directory/Baseline_Core_Root
+- mv $baseline_core_root_directory $new_baseline_core_root
+-fi
+-
+-ci=true
+-
+-_script_dir=$(pwd)/eng/common
+-. "$_script_dir/pipeline-logging-functions.sh"
+-
+-# Make sure all of our variables are available for future steps
+-Write-PipelineSetVariable -name "UseCoreRun" -value "$use_core_run" -is_multi_job_variable false
+-Write-PipelineSetVariable -name "UseBaselineCoreRun" -value "$use_baseline_core_run" -is_multi_job_variable false
+-Write-PipelineSetVariable -name "Architecture" -value "$architecture" -is_multi_job_variable false
+-Write-PipelineSetVariable -name "PayloadDirectory" -value "$payload_directory" -is_multi_job_variable false
+-Write-PipelineSetVariable -name "PerformanceDirectory" -value "$performance_directory" -is_multi_job_variable false
+-Write-PipelineSetVariable -name "WorkItemDirectory" -value "$workitem_directory" -is_multi_job_variable false
+-Write-PipelineSetVariable -name "Queue" -value "$queue" -is_multi_job_variable false
+-Write-PipelineSetVariable -name "SetupArguments" -value "$setup_arguments" -is_multi_job_variable false
+-Write-PipelineSetVariable -name "Python" -value "python3" -is_multi_job_variable false
+-Write-PipelineSetVariable -name "PerfLabArguments" -value "$perflab_arguments" -is_multi_job_variable false
+-Write-PipelineSetVariable -name "ExtraBenchmarkDotNetArguments" -value "$extra_benchmark_dotnet_arguments" -is_multi_job_variable false
+-Write-PipelineSetVariable -name "BDNCategories" -value "$run_categories" -is_multi_job_variable false
+-Write-PipelineSetVariable -name "TargetCsproj" -value "$csproj" -is_multi_job_variable false
+-Write-PipelineSetVariable -name "RunFromPerfRepo" -value "$run_from_perf_repo" -is_multi_job_variable false
+-Write-PipelineSetVariable -name "Creator" -value "$creator" -is_multi_job_variable false
+-Write-PipelineSetVariable -name "HelixSourcePrefix" -value "$helix_source_prefix" -is_multi_job_variable false
+-Write-PipelineSetVariable -name "Kind" -value "$kind" -is_multi_job_variable false
+-Write-PipelineSetVariable -name "_BuildConfig" -value "$architecture.$kind.$framework" -is_multi_job_variable false
+-Write-PipelineSetVariable -name "Compare" -value "$compare" -is_multi_job_variable false
+-Write-PipelineSetVariable -name "MonoDotnet" -value "$using_mono" -is_multi_job_variable false
+-Write-PipelineSetVariable -name "WasmDotnet" -value "$using_wasm" -is_multi_job_variable false
+diff --git a/eng/common/pipeline-logging-functions.ps1 b/eng/common/pipeline-logging-functions.ps1
+index 8484451..8e422c5 100644
+--- a/eng/common/pipeline-logging-functions.ps1
++++ b/eng/common/pipeline-logging-functions.ps1
+@@ -29,14 +29,14 @@ function Write-PipelineTelemetryError {
+ [switch]$AsOutput,
+ [switch]$Force)
+
+- $PSBoundParameters.Remove('Category') | Out-Null
++ $PSBoundParameters.Remove('Category') | Out-Null
+
+- if($Force -Or ((Test-Path variable:ci) -And $ci)) {
+- $Message = "(NETCORE_ENGINEERING_TELEMETRY=$Category) $Message"
+- }
+- $PSBoundParameters.Remove('Message') | Out-Null
+- $PSBoundParameters.Add('Message', $Message)
+- Write-PipelineTaskError @PSBoundParameters
++ if ($Force -Or ((Test-Path variable:ci) -And $ci)) {
++ $Message = "(NETCORE_ENGINEERING_TELEMETRY=$Category) $Message"
++ }
++ $PSBoundParameters.Remove('Message') | Out-Null
++ $PSBoundParameters.Add('Message', $Message)
++ Write-PipelineTaskError @PSBoundParameters
+ }
+
+ # Specify "-Force" to force pipeline formatted output even if "$ci" is false or not set
+@@ -55,8 +55,8 @@ function Write-PipelineTaskError {
+ [switch]$Force
+ )
+
+- if(!$Force -And (-Not (Test-Path variable:ci) -Or !$ci)) {
+- if($Type -eq 'error') {
++ if (!$Force -And (-Not (Test-Path variable:ci) -Or !$ci)) {
++ if ($Type -eq 'error') {
+ Write-Host $Message -ForegroundColor Red
+ return
+ }
+@@ -66,47 +66,61 @@ function Write-PipelineTaskError {
+ }
+ }
+
+- if(($Type -ne 'error') -and ($Type -ne 'warning')) {
++ if (($Type -ne 'error') -and ($Type -ne 'warning')) {
+ Write-Host $Message
+ return
+ }
+ $PSBoundParameters.Remove('Force') | Out-Null
+- if(-not $PSBoundParameters.ContainsKey('Type')) {
++ if (-not $PSBoundParameters.ContainsKey('Type')) {
+ $PSBoundParameters.Add('Type', 'error')
+ }
+ Write-LogIssue @PSBoundParameters
+- }
++}
+
+- function Write-PipelineSetVariable {
++function Write-PipelineSetVariable {
+ [CmdletBinding()]
+ param(
+- [Parameter(Mandatory = $true)]
+- [string]$Name,
+- [string]$Value,
+- [switch]$Secret,
+- [switch]$AsOutput,
+- [bool]$IsMultiJobVariable=$true)
+-
+- if((Test-Path variable:ci) -And $ci) {
++ [Parameter(Mandatory = $true)]
++ [string]$Name,
++ [string]$Value,
++ [switch]$Secret,
++ [switch]$AsOutput,
++ [bool]$IsMultiJobVariable = $true)
++
++ if ((Test-Path variable:ci) -And $ci) {
+ Write-LoggingCommand -Area 'task' -Event 'setvariable' -Data $Value -Properties @{
+- 'variable' = $Name
+- 'isSecret' = $Secret
+- 'isOutput' = $IsMultiJobVariable
++ 'variable' = $Name
++ 'isSecret' = $Secret
++ 'isOutput' = $IsMultiJobVariable
+ } -AsOutput:$AsOutput
+- }
+- }
++ }
++}
+
+- function Write-PipelinePrependPath {
++function Write-PipelinePrependPath {
+ [CmdletBinding()]
+ param(
+- [Parameter(Mandatory=$true)]
+- [string]$Path,
+- [switch]$AsOutput)
++ [Parameter(Mandatory = $true)]
++ [string]$Path,
++ [switch]$AsOutput)
+
+- if((Test-Path variable:ci) -And $ci) {
++ if ((Test-Path variable:ci) -And $ci) {
+ Write-LoggingCommand -Area 'task' -Event 'prependpath' -Data $Path -AsOutput:$AsOutput
+- }
+- }
++ }
++}
++
++function Write-PipelineSetResult {
++ [CmdletBinding()]
++ param(
++ [ValidateSet("Succeeded", "SucceededWithIssues", "Failed", "Cancelled", "Skipped")]
++ [Parameter(Mandatory = $true)]
++ [string]$Result,
++ [string]$Message)
++ if ((Test-Path variable:ci) -And $ci) {
++ Write-LoggingCommand -Area 'task' -Event 'complete' -Data $Message -Properties @{
++ 'result' = $Result
++ }
++ }
++}
+
+ <########################################
+ # Private functions.
+@@ -123,7 +137,8 @@ function Format-LoggingCommandData {
+ foreach ($mapping in $script:loggingCommandEscapeMappings) {
+ $Value = $Value.Replace($mapping.Token, $mapping.Replacement)
+ }
+- } else {
++ }
++ else {
+ for ($i = $script:loggingCommandEscapeMappings.Length - 1 ; $i -ge 0 ; $i--) {
+ $mapping = $script:loggingCommandEscapeMappings[$i]
+ $Value = $Value.Replace($mapping.Replacement, $mapping.Token)
+@@ -156,7 +171,8 @@ function Format-LoggingCommand {
+ if ($first) {
+ $null = $sb.Append(' ')
+ $first = $false
+- } else {
++ }
++ else {
+ $null = $sb.Append(';')
+ }
+
+@@ -193,7 +209,8 @@ function Write-LoggingCommand {
+ $command = Format-LoggingCommand -Area $Area -Event $Event -Data $Data -Properties $Properties
+ if ($AsOutput) {
+ $command
+- } else {
++ }
++ else {
+ Write-Host $command
+ }
+ }
+@@ -212,12 +229,12 @@ function Write-LogIssue {
+ [switch]$AsOutput)
+
+ $command = Format-LoggingCommand -Area 'task' -Event 'logissue' -Data $Message -Properties @{
+- 'type' = $Type
+- 'code' = $ErrCode
+- 'sourcepath' = $SourcePath
+- 'linenumber' = $LineNumber
+- 'columnnumber' = $ColumnNumber
+- }
++ 'type' = $Type
++ 'code' = $ErrCode
++ 'sourcepath' = $SourcePath
++ 'linenumber' = $LineNumber
++ 'columnnumber' = $ColumnNumber
++ }
+ if ($AsOutput) {
+ return $command
+ }
+@@ -229,7 +246,8 @@ function Write-LogIssue {
+ $foregroundColor = [System.ConsoleColor]::Red
+ $backgroundColor = [System.ConsoleColor]::Black
+ }
+- } else {
++ }
++ else {
+ $foregroundColor = $host.PrivateData.WarningForegroundColor
+ $backgroundColor = $host.PrivateData.WarningBackgroundColor
+ if ($foregroundColor -isnot [System.ConsoleColor] -or $backgroundColor -isnot [System.ConsoleColor]) {
+diff --git a/eng/common/pipeline-logging-functions.sh b/eng/common/pipeline-logging-functions.sh
+index 6cd0a34..6a0b225 100755
+--- a/eng/common/pipeline-logging-functions.sh
++++ b/eng/common/pipeline-logging-functions.sh
+@@ -6,7 +6,7 @@ function Write-PipelineTelemetryError {
+ local function_args=()
+ local message=''
+ while [[ $# -gt 0 ]]; do
+- opt="$(echo "${1/#--/-}" | awk '{print tolower($0)}')"
++ opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")"
+ case "$opt" in
+ -category|-c)
+ telemetry_category=$2
+@@ -48,7 +48,7 @@ function Write-PipelineTaskError {
+ local force=false
+
+ while [[ $# -gt 0 ]]; do
+- opt="$(echo "${1/#--/-}" | awk '{print tolower($0)}')"
++ opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")"
+ case "$opt" in
+ -type|-t)
+ message_type=$2
+@@ -122,7 +122,7 @@ function Write-PipelineSetVariable {
+ local is_multi_job_variable=true
+
+ while [[ $# -gt 0 ]]; do
+- opt="$(echo "${1/#--/-}" | awk '{print tolower($0)}')"
++ opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")"
+ case "$opt" in
+ -name|-n)
+ name=$2
+@@ -164,7 +164,7 @@ function Write-PipelinePrependPath {
+ local prepend_path=''
+
+ while [[ $# -gt 0 ]]; do
+- opt="$(echo "${1/#--/-}" | awk '{print tolower($0)}')"
++ opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")"
+ case "$opt" in
+ -path|-p)
+ prepend_path=$2
+@@ -179,4 +179,28 @@ function Write-PipelinePrependPath {
+ if [[ "$ci" == true ]]; then
+ echo "##vso[task.prependpath]$prepend_path"
+ fi
+-}
+\ No newline at end of file
++}
++
++function Write-PipelineSetResult {
++ local result=''
++ local message=''
++
++ while [[ $# -gt 0 ]]; do
++ opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")"
++ case "$opt" in
++ -result|-r)
++ result=$2
++ shift
++ ;;
++ -message|-m)
++ message=$2
++ shift
++ ;;
++ esac
++ shift
++ done
++
++ if [[ "$ci" == true ]]; then
++ echo "##vso[task.complete result=$result;]$message"
++ fi
++}
+diff --git a/eng/common/post-build/post-build-utils.ps1 b/eng/common/post-build/post-build-utils.ps1
+index 7d49744..534f698 100644
+--- a/eng/common/post-build/post-build-utils.ps1
++++ b/eng/common/post-build/post-build-utils.ps1
+@@ -69,9 +69,9 @@ function Trigger-Subscription([string]$SubscriptionId) {
+
+ function Validate-MaestroVars {
+ try {
+- Get-Variable MaestroApiEndPoint -Scope Global | Out-Null
+- Get-Variable MaestroApiVersion -Scope Global | Out-Null
+- Get-Variable MaestroApiAccessToken -Scope Global | Out-Null
++ Get-Variable MaestroApiEndPoint | Out-Null
++ Get-Variable MaestroApiVersion | Out-Null
++ Get-Variable MaestroApiAccessToken | Out-Null
+
+ if (!($MaestroApiEndPoint -Match '^http[s]?://maestro-(int|prod).westus2.cloudapp.azure.com$')) {
+ Write-PipelineTelemetryError -Category 'MaestroVars' -Message "MaestroApiEndPoint is not a valid Maestro URL. '$MaestroApiEndPoint'"
+diff --git a/eng/common/post-build/publish-using-darc.ps1 b/eng/common/post-build/publish-using-darc.ps1
+index 650b13b..2427ca6 100644
+--- a/eng/common/post-build/publish-using-darc.ps1
++++ b/eng/common/post-build/publish-using-darc.ps1
+@@ -10,21 +10,27 @@ param(
+ [Parameter(Mandatory=$false)][string] $EnableNugetValidation,
+ [Parameter(Mandatory=$false)][string] $PublishInstallersAndChecksums,
+ [Parameter(Mandatory=$false)][string] $ArtifactsPublishingAdditionalParameters,
++ [Parameter(Mandatory=$false)][string] $SymbolPublishingAdditionalParameters,
+ [Parameter(Mandatory=$false)][string] $SigningValidationAdditionalParameters
+ )
+
+ try {
+ . $PSScriptRoot\post-build-utils.ps1
+- # Hard coding darc version till the next arcade-services roll out, cos this version has required API changes for darc add-build-to-channel
+- $darc = Get-Darc "1.1.0-beta.20418.1"
++
++ $darc = Get-Darc
+
+ $optionalParams = [System.Collections.ArrayList]::new()
+
+ if ("" -ne $ArtifactsPublishingAdditionalParameters) {
+- $optionalParams.Add("artifact-publishing-parameters") | Out-Null
++ $optionalParams.Add("--artifact-publishing-parameters") | Out-Null
+ $optionalParams.Add($ArtifactsPublishingAdditionalParameters) | Out-Null
+ }
+
++ if ("" -ne $SymbolPublishingAdditionalParameters) {
++ $optionalParams.Add("--symbol-publishing-parameters") | Out-Null
++ $optionalParams.Add($SymbolPublishingAdditionalParameters) | Out-Null
++ }
++
+ if ("false" -eq $WaitPublishingFinish) {
+ $optionalParams.Add("--no-wait") | Out-Null
+ }
+@@ -54,7 +60,7 @@ try {
+ --id $buildId `
+ --publishing-infra-version $PublishingInfraVersion `
+ --default-channels `
+- --source-branch master `
++ --source-branch main `
+ --azdev-pat $AzdoToken `
+ --bar-uri $MaestroApiEndPoint `
+ --password $MaestroToken `
+diff --git a/eng/common/post-build/sourcelink-validation.ps1 b/eng/common/post-build/sourcelink-validation.ps1
+index c7e7ae6..85c8986 100644
+--- a/eng/common/post-build/sourcelink-validation.ps1
++++ b/eng/common/post-build/sourcelink-validation.ps1
+@@ -14,7 +14,9 @@ param(
+ $global:RepoFiles = @{}
+
+ # Maximum number of jobs to run in parallel
+-$MaxParallelJobs = 6
++$MaxParallelJobs = 16
++
++$MaxRetries = 5
+
+ # Wait time between check for system load
+ $SecondsBetweenLoadChecks = 10
+@@ -29,7 +31,10 @@ $ValidatePackage = {
+ # Ensure input file exist
+ if (!(Test-Path $PackagePath)) {
+ Write-Host "Input file does not exist: $PackagePath"
+- return 1
++ return [pscustomobject]@{
++ result = 1
++ packagePath = $PackagePath
++ }
+ }
+
+ # Extensions for which we'll look for SourceLink information
+@@ -59,7 +64,10 @@ $ValidatePackage = {
+
+ # We ignore resource DLLs
+ if ($FileName.EndsWith('.resources.dll')) {
+- return
++ return [pscustomobject]@{
++ result = 0
++ packagePath = $PackagePath
++ }
+ }
+
+ [System.IO.Compression.ZipFileExtensions]::ExtractToFile($_, $TargetFile, $true)
+@@ -91,36 +99,49 @@ $ValidatePackage = {
+ $Status = 200
+ $Cache = $using:RepoFiles
+
+- if ( !($Cache.ContainsKey($FilePath)) ) {
+- try {
+- $Uri = $Link -as [System.URI]
+-
+- # Only GitHub links are valid
+- if ($Uri.AbsoluteURI -ne $null -and ($Uri.Host -match 'github' -or $Uri.Host -match 'githubusercontent')) {
+- $Status = (Invoke-WebRequest -Uri $Link -UseBasicParsing -Method HEAD -TimeoutSec 5).StatusCode
++ $totalRetries = 0
++
++ while ($totalRetries -lt $using:MaxRetries) {
++ if ( !($Cache.ContainsKey($FilePath)) ) {
++ try {
++ $Uri = $Link -as [System.URI]
++
++ # Only GitHub links are valid
++ if ($Uri.AbsoluteURI -ne $null -and ($Uri.Host -match 'github' -or $Uri.Host -match 'githubusercontent')) {
++ $Status = (Invoke-WebRequest -Uri $Link -UseBasicParsing -Method HEAD -TimeoutSec 5).StatusCode
++ }
++ else {
++ # If it's not a github link, we want to break out of the loop and not retry.
++ $Status = 0
++ $totalRetries = $using:MaxRetries
++ }
+ }
+- else {
++ catch {
++ Write-Host $_
+ $Status = 0
+ }
+ }
+- catch {
+- write-host $_
+- $Status = 0
+- }
+- }
+
+- if ($Status -ne 200) {
+- if ($NumFailedLinks -eq 0) {
+- if ($FailedFiles.Value -eq 0) {
+- Write-Host
++ if ($Status -ne 200) {
++ $totalRetries++
++
++ if ($totalRetries -ge $using:MaxRetries) {
++ if ($NumFailedLinks -eq 0) {
++ if ($FailedFiles.Value -eq 0) {
++ Write-Host
++ }
++
++ Write-Host "`tFile $RealPath has broken links:"
++ }
++
++ Write-Host "`t`tFailed to retrieve $Link"
++
++ $NumFailedLinks++
+ }
+-
+- Write-Host "`tFile $RealPath has broken links:"
+ }
+-
+- Write-Host "`t`tFailed to retrieve $Link"
+-
+- $NumFailedLinks++
++ else {
++ break
++ }
+ }
+ }
+ }
+@@ -136,7 +157,7 @@ $ValidatePackage = {
+ }
+ }
+ catch {
+-
++ Write-Host $_
+ }
+ finally {
+ $zip.Dispose()
+@@ -161,9 +182,12 @@ $ValidatePackage = {
+ function CheckJobResult(
+ $result,
+ $packagePath,
+- [ref]$ValidationFailures) {
+- if ($jobResult.result -ne '0') {
+- Write-PipelineTelemetryError -Category 'SourceLink' -Message "$packagePath has broken SourceLink links."
++ [ref]$ValidationFailures,
++ [switch]$logErrors) {
++ if ($result -ne '0') {
++ if ($logErrors) {
++ Write-PipelineTelemetryError -Category 'SourceLink' -Message "$packagePath has broken SourceLink links."
++ }
+ $ValidationFailures.Value++
+ }
+ }
+@@ -217,6 +241,7 @@ function ValidateSourceLinkLinks {
+ # Process each NuGet package in parallel
+ Get-ChildItem "$InputPath\*.symbols.nupkg" |
+ ForEach-Object {
++ Write-Host "Starting $($_.FullName)"
+ Start-Job -ScriptBlock $ValidatePackage -ArgumentList $_.FullName | Out-Null
+ $NumJobs = @(Get-Job -State 'Running').Count
+
+@@ -228,16 +253,14 @@ function ValidateSourceLinkLinks {
+
+ foreach ($Job in @(Get-Job -State 'Completed')) {
+ $jobResult = Wait-Job -Id $Job.Id | Receive-Job
+- CheckJobResult $jobResult.result $jobResult.packagePath ([ref]$ValidationFailures)
++ CheckJobResult $jobResult.result $jobResult.packagePath ([ref]$ValidationFailures) -LogErrors
+ Remove-Job -Id $Job.Id
+ }
+ }
+
+ foreach ($Job in @(Get-Job)) {
+ $jobResult = Wait-Job -Id $Job.Id | Receive-Job
+- if ($jobResult -ne '0') {
+- $ValidationFailures++
+- }
++ CheckJobResult $jobResult.result $jobResult.packagePath ([ref]$ValidationFailures)
+ Remove-Job -Id $Job.Id
+ }
+ if ($ValidationFailures -gt 0) {
+@@ -266,6 +289,10 @@ function InstallSourcelinkCli {
+ try {
+ InstallSourcelinkCli
+
++ foreach ($Job in @(Get-Job)) {
++ Remove-Job -Id $Job.Id
++ }
++
+ ValidateSourceLinkLinks
+ }
+ catch {
+diff --git a/eng/common/post-build/symbols-validation.ps1 b/eng/common/post-build/symbols-validation.ps1
+index fcc6019..a5af041 100644
+--- a/eng/common/post-build/symbols-validation.ps1
++++ b/eng/common/post-build/symbols-validation.ps1
+@@ -1,30 +1,49 @@
+ param(
+- [Parameter(Mandatory=$true)][string] $InputPath, # Full path to directory where NuGet packages to be checked are stored
+- [Parameter(Mandatory=$true)][string] $ExtractPath, # Full path to directory where the packages will be extracted during validation
+- [Parameter(Mandatory=$true)][string] $DotnetSymbolVersion, # Version of dotnet symbol to use
+- [Parameter(Mandatory=$false)][switch] $ContinueOnError, # If we should keep checking symbols after an error
+- [Parameter(Mandatory=$false)][switch] $Clean # Clean extracted symbols directory after checking symbols
++ [Parameter(Mandatory = $true)][string] $InputPath, # Full path to directory where NuGet packages to be checked are stored
++ [Parameter(Mandatory = $true)][string] $ExtractPath, # Full path to directory where the packages will be extracted during validation
++ [Parameter(Mandatory = $true)][string] $DotnetSymbolVersion, # Version of dotnet symbol to use
++ [Parameter(Mandatory = $false)][switch] $CheckForWindowsPdbs, # If we should check for the existence of windows pdbs in addition to portable PDBs
++ [Parameter(Mandatory = $false)][switch] $ContinueOnError, # If we should keep checking symbols after an error
++ [Parameter(Mandatory = $false)][switch] $Clean # Clean extracted symbols directory after checking symbols
+ )
+
+ # Maximum number of jobs to run in parallel
+-$MaxParallelJobs = 6
++$MaxParallelJobs = 16
++
++# Max number of retries
++$MaxRetry = 5
+
+ # Wait time between check for system load
+ $SecondsBetweenLoadChecks = 10
+
++# Set error codes
++Set-Variable -Name "ERROR_BADEXTRACT" -Option Constant -Value -1
++Set-Variable -Name "ERROR_FILEDOESNOTEXIST" -Option Constant -Value -2
++
++$WindowsPdbVerificationParam = ""
++if ($CheckForWindowsPdbs) {
++ $WindowsPdbVerificationParam = "--windows-pdbs"
++}
++
+ $CountMissingSymbols = {
+ param(
+- [string] $PackagePath # Path to a NuGet package
++ [string] $PackagePath, # Path to a NuGet package
++ [string] $WindowsPdbVerificationParam # If we should check for the existence of windows pdbs in addition to portable PDBs
+ )
+
+ . $using:PSScriptRoot\..\tools.ps1
+
+ Add-Type -AssemblyName System.IO.Compression.FileSystem
+
++ Write-Host "Validating $PackagePath "
++
+ # Ensure input file exist
+ if (!(Test-Path $PackagePath)) {
+ Write-PipelineTaskError "Input file does not exist: $PackagePath"
+- return -2
++ return [pscustomobject]@{
++ result = $using:ERROR_FILEDOESNOTEXIST
++ packagePath = $PackagePath
++ }
+ }
+
+ # Extensions for which we'll look for symbols
+@@ -45,24 +64,25 @@ $CountMissingSymbols = {
+ Write-Host "Something went wrong extracting $PackagePath"
+ Write-Host $_
+ return [pscustomobject]@{
+- result = -1
++ result = $using:ERROR_BADEXTRACT
+ packagePath = $PackagePath
+ }
+ }
+
+ Get-ChildItem -Recurse $ExtractPath |
+- Where-Object {$RelevantExtensions -contains $_.Extension} |
+- ForEach-Object {
+- $FileName = $_.FullName
+- if ($FileName -Match '\\ref\\') {
+- Write-Host "`t Ignoring reference assembly file " $FileName
+- return
+- }
++ Where-Object { $RelevantExtensions -contains $_.Extension } |
++ ForEach-Object {
++ $FileName = $_.FullName
++ if ($FileName -Match '\\ref\\') {
++ Write-Host "`t Ignoring reference assembly file " $FileName
++ return
++ }
+
+- $FirstMatchingSymbolDescriptionOrDefault = {
++ $FirstMatchingSymbolDescriptionOrDefault = {
+ param(
+- [string] $FullPath, # Full path to the module that has to be checked
+- [string] $TargetServerParam, # Parameter to pass to `Symbol Tool` indicating the server to lookup for symbols
++ [string] $FullPath, # Full path to the module that has to be checked
++ [string] $TargetServerParam, # Parameter to pass to `Symbol Tool` indicating the server to lookup for symbols
++ [string] $WindowsPdbVerificationParam, # Parameter to pass to potential check for windows-pdbs.
+ [string] $SymbolsPath
+ )
+
+@@ -87,56 +107,76 @@ $CountMissingSymbols = {
+
+ # DWARF file for a .dylib
+ $DylibDwarf = $SymbolPath.Replace($Extension, '.dylib.dwarf')
+-
++
+ $dotnetSymbolExe = "$env:USERPROFILE\.dotnet\tools"
+ $dotnetSymbolExe = Resolve-Path "$dotnetSymbolExe\dotnet-symbol.exe"
+
+- & $dotnetSymbolExe --symbols --modules --windows-pdbs $TargetServerParam $FullPath -o $SymbolsPath | Out-Null
++ $totalRetries = 0
+
+- if (Test-Path $PdbPath) {
+- return 'PDB'
+- }
+- elseif (Test-Path $NGenPdb) {
+- return 'NGen PDB'
+- }
+- elseif (Test-Path $SODbg) {
+- return 'DBG for SO'
+- }
+- elseif (Test-Path $DylibDwarf) {
+- return 'Dwarf for Dylib'
+- }
+- elseif (Test-Path $SymbolPath) {
+- return 'Module'
+- }
+- else {
+- return $null
++ while ($totalRetries -lt $using:MaxRetry) {
++
++ # Save the output and get diagnostic output
++ $output = & $dotnetSymbolExe --symbols --modules $WindowsPdbVerificationParam $TargetServerParam $FullPath -o $SymbolsPath --diagnostics | Out-String
++
++ if (Test-Path $PdbPath) {
++ return 'PDB'
++ }
++ elseif (Test-Path $NGenPdb) {
++ return 'NGen PDB'
++ }
++ elseif (Test-Path $SODbg) {
++ return 'DBG for SO'
++ }
++ elseif (Test-Path $DylibDwarf) {
++ return 'Dwarf for Dylib'
++ }
++ elseif (Test-Path $SymbolPath) {
++ return 'Module'
++ }
++ else
++ {
++ $totalRetries++
++ }
+ }
++
++ return $null
+ }
+
+- $SymbolsOnMSDL = & $FirstMatchingSymbolDescriptionOrDefault $FileName '--microsoft-symbol-server' $SymbolsPath
+- $SymbolsOnSymWeb = & $FirstMatchingSymbolDescriptionOrDefault $FileName '--internal-server' $SymbolsPath
+-
+- Write-Host -NoNewLine "`t Checking file " $FileName "... "
++ $FileGuid = New-Guid
++ $ExpandedSymbolsPath = Join-Path -Path $SymbolsPath -ChildPath $FileGuid
++
++ $SymbolsOnMSDL = & $FirstMatchingSymbolDescriptionOrDefault `
++ -FullPath $FileName `
++ -TargetServerParam '--microsoft-symbol-server' `
++ -SymbolsPath "$ExpandedSymbolsPath-msdl" `
++ -WindowsPdbVerificationParam $WindowsPdbVerificationParam
++ $SymbolsOnSymWeb = & $FirstMatchingSymbolDescriptionOrDefault `
++ -FullPath $FileName `
++ -TargetServerParam '--internal-server' `
++ -SymbolsPath "$ExpandedSymbolsPath-symweb" `
++ -WindowsPdbVerificationParam $WindowsPdbVerificationParam
++
++ Write-Host -NoNewLine "`t Checking file " $FileName "... "
+
+- if ($SymbolsOnMSDL -ne $null -and $SymbolsOnSymWeb -ne $null) {
+- Write-Host "Symbols found on MSDL ($SymbolsOnMSDL) and SymWeb ($SymbolsOnSymWeb)"
++ if ($SymbolsOnMSDL -ne $null -and $SymbolsOnSymWeb -ne $null) {
++ Write-Host "Symbols found on MSDL ($SymbolsOnMSDL) and SymWeb ($SymbolsOnSymWeb)"
++ }
++ else {
++ $MissingSymbols++
++
++ if ($SymbolsOnMSDL -eq $null -and $SymbolsOnSymWeb -eq $null) {
++ Write-Host 'No symbols found on MSDL or SymWeb!'
+ }
+ else {
+- $MissingSymbols++
+-
+- if ($SymbolsOnMSDL -eq $null -and $SymbolsOnSymWeb -eq $null) {
+- Write-Host 'No symbols found on MSDL or SymWeb!'
++ if ($SymbolsOnMSDL -eq $null) {
++ Write-Host 'No symbols found on MSDL!'
+ }
+ else {
+- if ($SymbolsOnMSDL -eq $null) {
+- Write-Host 'No symbols found on MSDL!'
+- }
+- else {
+- Write-Host 'No symbols found on SymWeb!'
+- }
++ Write-Host 'No symbols found on SymWeb!'
+ }
+ }
+ }
++ }
+
+ if ($using:Clean) {
+ Remove-Item $ExtractPath -Recurse -Force
+@@ -145,24 +185,31 @@ $CountMissingSymbols = {
+ Pop-Location
+
+ return [pscustomobject]@{
+- result = $MissingSymbols
+- packagePath = $PackagePath
+- }
++ result = $MissingSymbols
++ packagePath = $PackagePath
++ }
+ }
+
+ function CheckJobResult(
+- $result,
+- $packagePath,
+- [ref]$DupedSymbols,
+- [ref]$TotalFailures) {
+- if ($result -eq '-1') {
++ $result,
++ $packagePath,
++ [ref]$DupedSymbols,
++ [ref]$TotalFailures) {
++ if ($result -eq $ERROR_BADEXTRACT) {
+ Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "$packagePath has duplicated symbol files"
+ $DupedSymbols.Value++
+ }
+- elseif ($jobResult.result -ne '0') {
++ elseif ($result -eq $ERROR_FILEDOESNOTEXIST) {
++ Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "$packagePath does not exist"
++ $TotalFailures.Value++
++ }
++ elseif ($result -gt '0') {
+ Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "Missing symbols for $result modules in the package $packagePath"
+ $TotalFailures.Value++
+ }
++ else {
++ Write-Host "All symbols verified for package $packagePath"
++ }
+ }
+
+ function CheckSymbolsAvailable {
+@@ -170,6 +217,7 @@ function CheckSymbolsAvailable {
+ Remove-Item $ExtractPath -Force -Recurse -ErrorAction SilentlyContinue
+ }
+
++ $TotalPackages = 0
+ $TotalFailures = 0
+ $DupedSymbols = 0
+
+@@ -192,9 +240,9 @@ function CheckSymbolsAvailable {
+ return
+ }
+
+- Write-Host "Validating $FileName "
++ $TotalPackages++
+
+- Start-Job -ScriptBlock $CountMissingSymbols -ArgumentList $FullName | Out-Null
++ Start-Job -ScriptBlock $CountMissingSymbols -ArgumentList @($FullName,$WindowsPdbVerificationParam) | Out-Null
+
+ $NumJobs = @(Get-Job -State 'Running').Count
+
+@@ -219,11 +267,11 @@ function CheckSymbolsAvailable {
+
+ if ($TotalFailures -gt 0 -or $DupedSymbols -gt 0) {
+ if ($TotalFailures -gt 0) {
+- Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "Symbols missing for $TotalFailures packages"
++ Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "Symbols missing for $TotalFailures/$TotalPackages packages"
+ }
+
+ if ($DupedSymbols -gt 0) {
+- Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "$DupedSymbols packages had duplicated symbol files"
++ Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "$DupedSymbols/$TotalPackages packages had duplicated symbol files and could not be extracted"
+ }
+
+ ExitWithExitCode 1
+diff --git a/eng/common/sdk-task.ps1 b/eng/common/sdk-task.ps1
+index f55c43c..b1bca63 100644
+--- a/eng/common/sdk-task.ps1
++++ b/eng/common/sdk-task.ps1
+@@ -34,7 +34,7 @@ function Print-Usage() {
+ function Build([string]$target) {
+ $logSuffix = if ($target -eq 'Execute') { '' } else { ".$target" }
+ $log = Join-Path $LogDir "$task$logSuffix.binlog"
+- $outputPath = Join-Path $ToolsetDir "$task\\"
++ $outputPath = Join-Path $ToolsetDir "$task\"
+
+ MSBuild $taskProject `
+ /bl:$log `
+@@ -53,7 +53,7 @@ try {
+ }
+
+ if ($task -eq "") {
+- Write-PipelineTelemetryError -Category 'Build' -Message "Missing required parameter '-task '" -ForegroundColor Red
++ Write-PipelineTelemetryError -Category 'Build' -Message "Missing required parameter '-task '"
+ Print-Usage
+ ExitWithExitCode 1
+ }
+@@ -64,7 +64,7 @@ try {
+ $GlobalJson.tools | Add-Member -Name "vs" -Value (ConvertFrom-Json "{ `"version`": `"16.5`" }") -MemberType NoteProperty
+ }
+ if( -not ($GlobalJson.tools.PSObject.Properties.Name -match "xcopy-msbuild" )) {
+- $GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value "16.8.0-preview3" -MemberType NoteProperty
++ $GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value "16.10.0-preview2" -MemberType NoteProperty
+ }
+ if ($GlobalJson.tools."xcopy-msbuild".Trim() -ine "none") {
+ $xcopyMSBuildToolsFolder = InitializeXCopyMSBuild $GlobalJson.tools."xcopy-msbuild" -install $true
+@@ -78,7 +78,7 @@ try {
+
+ $taskProject = GetSdkTaskProject $task
+ if (!(Test-Path $taskProject)) {
+- Write-PipelineTelemetryError -Category 'Build' -Message "Unknown task: $task" -ForegroundColor Red
++ Write-PipelineTelemetryError -Category 'Build' -Message "Unknown task: $task"
+ ExitWithExitCode 1
+ }
+
+diff --git a/eng/common/sdl/execute-all-sdl-tools.ps1 b/eng/common/sdl/execute-all-sdl-tools.ps1
+index b681d79..2881a56 100644
+--- a/eng/common/sdl/execute-all-sdl-tools.ps1
++++ b/eng/common/sdl/execute-all-sdl-tools.ps1
+@@ -32,7 +32,7 @@ try {
+ $ErrorActionPreference = 'Stop'
+ Set-StrictMode -Version 2.0
+ $disableConfigureToolsetImport = $true
+- $LASTEXITCODE = 0
++ $global:LASTEXITCODE = 0
+
+ # `tools.ps1` checks $ci to perform some actions. Since the SDL
+ # scripts don't necessarily execute in the same agent that run the
+@@ -87,10 +87,6 @@ try {
+ & $(Join-Path $PSScriptRoot 'run-sdl.ps1') -GuardianCliLocation $guardianCliLocation -WorkingDirectory $workingDirectory -TargetDirectory $SourceDirectory -GdnFolder $gdnFolder -ToolsList $SourceToolsList -AzureDevOpsAccessToken $AzureDevOpsAccessToken -UpdateBaseline $UpdateBaseline -GuardianLoggerLevel $GuardianLoggerLevel -CrScanAdditionalRunConfigParams $CrScanAdditionalRunConfigParams -PoliCheckAdditionalRunConfigParams $PoliCheckAdditionalRunConfigParams
+ }
+
+- if ($UpdateBaseline) {
+- & (Join-Path $PSScriptRoot 'push-gdn.ps1') -Repository $RepoName -BranchName $BranchName -GdnFolder $GdnFolder -AzureDevOpsAccessToken $AzureDevOpsAccessToken -PushReason 'Update baseline'
+- }
+-
+ if ($TsaPublish) {
+ if ($TsaBranchName -and $BuildNumber) {
+ if (-not $TsaRepositoryName) {
+diff --git a/eng/common/sdl/init-sdl.ps1 b/eng/common/sdl/init-sdl.ps1
+index a68bf0b..3ac1d92 100644
+--- a/eng/common/sdl/init-sdl.ps1
++++ b/eng/common/sdl/init-sdl.ps1
+@@ -10,7 +10,7 @@ Param(
+ $ErrorActionPreference = 'Stop'
+ Set-StrictMode -Version 2.0
+ $disableConfigureToolsetImport = $true
+-$LASTEXITCODE = 0
++$global:LASTEXITCODE = 0
+
+ # `tools.ps1` checks $ci to perform some actions. Since the SDL
+ # scripts don't necessarily execute in the same agent that run the
+@@ -29,18 +29,7 @@ $zipFile = "$WorkingDirectory/gdn.zip"
+
+ Add-Type -AssemblyName System.IO.Compression.FileSystem
+ $gdnFolder = (Join-Path $WorkingDirectory '.gdn')
+-try {
+- # We try to download the zip; if the request fails (e.g. the file doesn't exist), we catch it and init guardian instead
+- Write-Host 'Downloading gdn folder from internal config repostiory...'
+- Invoke-WebRequest -Headers @{ "Accept"="application/zip"; "Authorization"="Basic $encodedPat" } -Uri $uri -OutFile $zipFile
+- if (Test-Path $gdnFolder) {
+- # Remove the gdn folder if it exists (it shouldn't unless there's too much caching; this is just in case)
+- Remove-Item -Force -Recurse $gdnFolder
+- }
+- [System.IO.Compression.ZipFile]::ExtractToDirectory($zipFile, $WorkingDirectory)
+- Write-Host $gdnFolder
+- ExitWithExitCode 0
+-} catch [System.Net.WebException] { } # Catch and ignore webexception
++
+ try {
+ # if the folder does not exist, we'll do a guardian init and push it to the remote repository
+ Write-Host 'Initializing Guardian...'
+@@ -57,7 +46,6 @@ try {
+ Write-PipelineTelemetryError -Force -Category 'Build' -Message "Guardian baseline failed with exit code $LASTEXITCODE."
+ ExitWithExitCode $LASTEXITCODE
+ }
+- & $(Join-Path $PSScriptRoot 'push-gdn.ps1') -Repository $Repository -BranchName $BranchName -GdnFolder $gdnFolder -AzureDevOpsAccessToken $AzureDevOpsAccessToken -PushReason 'Initialize gdn folder'
+ ExitWithExitCode 0
+ }
+ catch {
+diff --git a/eng/common/sdl/packages.config b/eng/common/sdl/packages.config
+index 968b39b..3bd8b29 100644
+--- a/eng/common/sdl/packages.config
++++ b/eng/common/sdl/packages.config
+@@ -1,4 +1,4 @@
+
+
+-
++
+
+diff --git a/eng/common/sdl/push-gdn.ps1 b/eng/common/sdl/push-gdn.ps1
+deleted file mode 100644
+index d8fd2d8..0000000
+--- a/eng/common/sdl/push-gdn.ps1
++++ /dev/null
+@@ -1,69 +0,0 @@
+-Param(
+- [string] $Repository,
+- [string] $BranchName='master',
+- [string] $GdnFolder,
+- [string] $AzureDevOpsAccessToken,
+- [string] $PushReason
+-)
+-
+-$ErrorActionPreference = 'Stop'
+-Set-StrictMode -Version 2.0
+-$disableConfigureToolsetImport = $true
+-$LASTEXITCODE = 0
+-
+-try {
+- # `tools.ps1` checks $ci to perform some actions. Since the SDL
+- # scripts don't necessarily execute in the same agent that run the
+- # build.ps1/sh script this variable isn't automatically set.
+- $ci = $true
+- . $PSScriptRoot\..\tools.ps1
+-
+- # We create the temp directory where we'll store the sdl-config repository
+- $sdlDir = Join-Path $env:TEMP 'sdl'
+- if (Test-Path $sdlDir) {
+- Remove-Item -Force -Recurse $sdlDir
+- }
+-
+- Write-Host "git clone https://dnceng:`$AzureDevOpsAccessToken@dev.azure.com/dnceng/internal/_git/sdl-tool-cfg $sdlDir"
+- git clone https://dnceng:$AzureDevOpsAccessToken@dev.azure.com/dnceng/internal/_git/sdl-tool-cfg $sdlDir
+- if ($LASTEXITCODE -ne 0) {
+- Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Git clone failed with exit code $LASTEXITCODE."
+- ExitWithExitCode $LASTEXITCODE
+- }
+- # We copy the .gdn folder from our local run into the git repository so it can be committed
+- $sdlRepositoryFolder = Join-Path (Join-Path (Join-Path $sdlDir $Repository) $BranchName) '.gdn'
+- if (Get-Command Robocopy) {
+- Robocopy /S $GdnFolder $sdlRepositoryFolder
+- } else {
+- rsync -r $GdnFolder $sdlRepositoryFolder
+- }
+- # cd to the sdl-config directory so we can run git there
+- Push-Location $sdlDir
+- # git add . --> git commit --> git push
+- Write-Host 'git add .'
+- git add .
+- if ($LASTEXITCODE -ne 0) {
+- Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Git add failed with exit code $LASTEXITCODE."
+- ExitWithExitCode $LASTEXITCODE
+- }
+- Write-Host "git -c user.email=`"dn-bot@microsoft.com`" -c user.name=`"Dotnet Bot`" commit -m `"$PushReason for $Repository/$BranchName`""
+- git -c user.email="dn-bot@microsoft.com" -c user.name="Dotnet Bot" commit -m "$PushReason for $Repository/$BranchName"
+- if ($LASTEXITCODE -ne 0) {
+- Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Git commit failed with exit code $LASTEXITCODE."
+- ExitWithExitCode $LASTEXITCODE
+- }
+- Write-Host 'git push'
+- git push
+- if ($LASTEXITCODE -ne 0) {
+- Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Git push failed with exit code $LASTEXITCODE."
+- ExitWithExitCode $LASTEXITCODE
+- }
+-
+- # Return to the original directory
+- Pop-Location
+-}
+-catch {
+- Write-Host $_.ScriptStackTrace
+- Write-PipelineTelemetryError -Category 'Sdl' -Message $_
+- ExitWithExitCode 1
+-}
+diff --git a/eng/common/sdl/run-sdl.ps1 b/eng/common/sdl/run-sdl.ps1
+index fe95ab3..3d9c87a 100644
+--- a/eng/common/sdl/run-sdl.ps1
++++ b/eng/common/sdl/run-sdl.ps1
+@@ -13,7 +13,7 @@ Param(
+ $ErrorActionPreference = 'Stop'
+ Set-StrictMode -Version 2.0
+ $disableConfigureToolsetImport = $true
+-$LASTEXITCODE = 0
++$global:LASTEXITCODE = 0
+
+ try {
+ # `tools.ps1` checks $ci to perform some actions. Since the SDL
+diff --git a/eng/common/templates/job/execute-sdl.yml b/eng/common/templates/job/execute-sdl.yml
+index c64c4f5..4a32181 100644
+--- a/eng/common/templates/job/execute-sdl.yml
++++ b/eng/common/templates/job/execute-sdl.yml
+@@ -45,6 +45,7 @@ jobs:
+ buildId: $(AzDOBuildId)
+ artifactName: ${{ artifactName }}
+ downloadPath: $(Build.ArtifactStagingDirectory)\artifacts
++ checkDownloadedFiles: true
+ - ${{ if eq(parameters.artifactNames, '') }}:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Build Artifacts
+@@ -57,6 +58,7 @@ jobs:
+ downloadType: specific files
+ itemPattern: "**"
+ downloadPath: $(Build.ArtifactStagingDirectory)\artifacts
++ checkDownloadedFiles: true
+ - powershell: eng/common/sdl/extract-artifact-packages.ps1
+ -InputPath $(Build.ArtifactStagingDirectory)\artifacts\BlobArtifacts
+ -ExtractPath $(Build.ArtifactStagingDirectory)\artifacts\BlobArtifacts
+@@ -83,7 +85,7 @@ jobs:
+ continueOnError: ${{ parameters.sdlContinueOnError }}
+ - ${{ if eq(parameters.overrideParameters, '') }}:
+ - powershell: eng/common/sdl/execute-all-sdl-tools.ps1
+- -GuardianPackageName Microsoft.Guardian.Cli.win10-x64.0.20.1
++ -GuardianPackageName Microsoft.Guardian.Cli.0.53.3
+ -NugetPackageDirectory $(Build.SourcesDirectory)\.packages
+ -AzureDevOpsAccessToken $(dn-bot-dotnet-build-rw-code-rw)
+ ${{ parameters.additionalParameters }}
+diff --git a/eng/common/templates/job/job.yml b/eng/common/templates/job/job.yml
+index e78ed9a..8669679 100644
+--- a/eng/common/templates/job/job.yml
++++ b/eng/common/templates/job/job.yml
+@@ -24,9 +24,9 @@ parameters:
+ enablePublishBuildAssets: false
+ enablePublishTestResults: false
+ enablePublishUsingPipelines: false
+- useBuildManifest: false
+ mergeTestResults: false
+ testRunTitle: ''
++ testResultsFormat: ''
+ name: ''
+ preSteps: []
+ runAsPublic: false
+@@ -131,8 +131,8 @@ jobs:
+ - task: RichCodeNavIndexer@0
+ displayName: RichCodeNav Upload
+ inputs:
+- languages: 'csharp'
+- environment: ${{ coalesce(parameters.richCodeNavigationEnvironment, 'prod') }}
++ languages: ${{ coalesce(parameters.richCodeNavigationLanguage, 'csharp') }}
++ environment: ${{ coalesce(parameters.richCodeNavigationEnvironment, 'production') }}
+ richNavLogOutputDirectory: $(Build.SourcesDirectory)/artifacts/bin
+ continueOnError: true
+
+@@ -202,9 +202,9 @@ jobs:
+ continueOnError: true
+ condition: always()
+
+- - ${{ if eq(parameters.enablePublishTestResults, 'true') }}:
++ - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'xunit')) }}:
+ - task: PublishTestResults@2
+- displayName: Publish Test Results
++ displayName: Publish XUnit Test Results
+ inputs:
+ testResultsFormat: 'xUnit'
+ testResultsFiles: '*.xml'
+@@ -213,6 +213,17 @@ jobs:
+ mergeTestResults: ${{ parameters.mergeTestResults }}
+ continueOnError: true
+ condition: always()
++ - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'vstest')) }}:
++ - task: PublishTestResults@2
++ displayName: Publish TRX Test Results
++ inputs:
++ testResultsFormat: 'VSTest'
++ testResultsFiles: '*.trx'
++ searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)'
++ testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-trx
++ mergeTestResults: ${{ parameters.mergeTestResults }}
++ continueOnError: true
++ condition: always()
+
+ - ${{ if and(eq(parameters.enablePublishBuildAssets, true), ne(parameters.enablePublishUsingPipelines, 'true'), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - task: CopyFiles@2
+@@ -231,12 +242,3 @@ jobs:
+ ArtifactName: AssetManifests
+ continueOnError: ${{ parameters.continueOnError }}
+ condition: and(succeeded(), eq(variables['_DotNetPublishToBlobFeed'], 'true'))
+-
+- - ${{ if eq(parameters.useBuildManifest, true) }}:
+- - task: PublishBuildArtifacts@1
+- displayName: Publish Build Manifest
+- inputs:
+- PathToPublish: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)/manifest.props'
+- PublishLocation: Container
+- ArtifactName: BuildManifests
+- continueOnError: ${{ parameters.continueOnError }}
+diff --git a/eng/common/templates/job/onelocbuild.yml b/eng/common/templates/job/onelocbuild.yml
+new file mode 100644
+index 0000000..e8bc77d
+--- /dev/null
++++ b/eng/common/templates/job/onelocbuild.yml
+@@ -0,0 +1,93 @@
++parameters:
++ # Optional: dependencies of the job
++ dependsOn: ''
++
++ # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
++ pool:
++ vmImage: vs2017-win2016
++
++ CeapexPat: $(dn-bot-ceapex-package-r) # PAT for the loc AzDO instance https://dev.azure.com/ceapex
++ GithubPat: $(BotAccount-dotnet-bot-repo-PAT)
++
++ SourcesDirectory: $(Build.SourcesDirectory)
++ CreatePr: true
++ AutoCompletePr: false
++ UseLfLineEndings: true
++ UseCheckedInLocProjectJson: false
++ LanguageSet: VS_Main_Languages
++ LclSource: lclFilesInRepo
++ LclPackageId: ''
++ RepoType: gitHub
++ GitHubOrg: dotnet
++ MirrorRepo: ''
++ MirrorBranch: main
++ condition: ''
++
++jobs:
++- job: OneLocBuild
++
++ dependsOn: ${{ parameters.dependsOn }}
++
++ displayName: OneLocBuild
++
++ pool: ${{ parameters.pool }}
++
++ variables:
++ - group: OneLocBuildVariables # Contains the CeapexPat and GithubPat
++ - name: _GenerateLocProjectArguments
++ value: -SourcesDirectory ${{ parameters.SourcesDirectory }}
++ -LanguageSet "${{ parameters.LanguageSet }}"
++ -CreateNeutralXlfs
++ - ${{ if eq(parameters.UseCheckedInLocProjectJson, 'true') }}:
++ - name: _GenerateLocProjectArguments
++ value: ${{ variables._GenerateLocProjectArguments }} -UseCheckedInLocProjectJson
++
++
++ steps:
++ - task: Powershell@2
++ inputs:
++ filePath: $(Build.SourcesDirectory)/eng/common/generate-locproject.ps1
++ arguments: $(_GenerateLocProjectArguments)
++ displayName: Generate LocProject.json
++ condition: ${{ parameters.condition }}
++
++ - task: OneLocBuild@2
++ displayName: OneLocBuild
++ env:
++ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
++ inputs:
++ locProj: eng/Localize/LocProject.json
++ outDir: $(Build.ArtifactStagingDirectory)
++ lclSource: ${{ parameters.LclSource }}
++ lclPackageId: ${{ parameters.LclPackageId }}
++ isCreatePrSelected: ${{ parameters.CreatePr }}
++ ${{ if eq(parameters.CreatePr, true) }}:
++ isAutoCompletePrSelected: ${{ parameters.AutoCompletePr }}
++ isUseLfLineEndingsSelected: ${{ parameters.UseLfLineEndings }}
++ packageSourceAuth: patAuth
++ patVariable: ${{ parameters.CeapexPat }}
++ ${{ if eq(parameters.RepoType, 'gitHub') }}:
++ repoType: ${{ parameters.RepoType }}
++ gitHubPatVariable: "${{ parameters.GithubPat }}"
++ ${{ if ne(parameters.MirrorRepo, '') }}:
++ isMirrorRepoSelected: true
++ gitHubOrganization: ${{ parameters.GitHubOrg }}
++ mirrorRepo: ${{ parameters.MirrorRepo }}
++ mirrorBranch: ${{ parameters.MirrorBranch }}
++ condition: ${{ parameters.condition }}
++
++ - task: PublishBuildArtifacts@1
++ displayName: Publish Localization Files
++ inputs:
++ PathtoPublish: '$(Build.ArtifactStagingDirectory)/loc'
++ PublishLocation: Container
++ ArtifactName: Loc
++ condition: ${{ parameters.condition }}
++
++ - task: PublishBuildArtifacts@1
++ displayName: Publish LocProject.json
++ inputs:
++ PathtoPublish: '$(Build.SourcesDirectory)/eng/Localize/'
++ PublishLocation: Container
++ ArtifactName: Loc
++ condition: ${{ parameters.condition }}
+\ No newline at end of file
+diff --git a/eng/common/templates/job/performance.yml b/eng/common/templates/job/performance.yml
+deleted file mode 100644
+index f877fd7..0000000
+--- a/eng/common/templates/job/performance.yml
++++ /dev/null
+@@ -1,95 +0,0 @@
+-parameters:
+- steps: [] # optional -- any additional steps that need to happen before pulling down the performance repo and sending the performance benchmarks to helix (ie building your repo)
+- variables: [] # optional -- list of additional variables to send to the template
+- jobName: '' # required -- job name
+- displayName: '' # optional -- display name for the job. Will use jobName if not passed
+- pool: '' # required -- name of the Build pool
+- container: '' # required -- name of the container
+- osGroup: '' # required -- operating system for the job
+- extraSetupParameters: '' # optional -- extra arguments to pass to the setup script
+- frameworks: ['netcoreapp3.0'] # optional -- list of frameworks to run against
+- continueOnError: 'false' # optional -- determines whether to continue the build if the step errors
+- dependsOn: '' # optional -- dependencies of the job
+- timeoutInMinutes: 320 # optional -- timeout for the job
+- enableTelemetry: false # optional -- enable for telemetry
+-
+-jobs:
+-- template: ../jobs/jobs.yml
+- parameters:
+- dependsOn: ${{ parameters.dependsOn }}
+- enableTelemetry: ${{ parameters.enableTelemetry }}
+- enablePublishBuildArtifacts: true
+- continueOnError: ${{ parameters.continueOnError }}
+-
+- jobs:
+- - job: '${{ parameters.jobName }}'
+-
+- ${{ if ne(parameters.displayName, '') }}:
+- displayName: '${{ parameters.displayName }}'
+- ${{ if eq(parameters.displayName, '') }}:
+- displayName: '${{ parameters.jobName }}'
+-
+- timeoutInMinutes: ${{ parameters.timeoutInMinutes }}
+-
+- variables:
+-
+- - ${{ each variable in parameters.variables }}:
+- - ${{ if ne(variable.name, '') }}:
+- - name: ${{ variable.name }}
+- value: ${{ variable.value }}
+- - ${{ if ne(variable.group, '') }}:
+- - group: ${{ variable.group }}
+-
+- - IsInternal: ''
+- - HelixApiAccessToken: ''
+- - HelixPreCommand: ''
+-
+- - ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+- - ${{ if eq( parameters.osGroup, 'Windows_NT') }}:
+- - HelixPreCommand: 'set "PERFLAB_UPLOAD_TOKEN=$(PerfCommandUploadToken)"'
+- - IsInternal: -Internal
+- - ${{ if ne(parameters.osGroup, 'Windows_NT') }}:
+- - HelixPreCommand: 'export PERFLAB_UPLOAD_TOKEN="$(PerfCommandUploadTokenLinux)"'
+- - IsInternal: --internal
+-
+- - group: DotNet-HelixApi-Access
+- - group: dotnet-benchview
+-
+- workspace:
+- clean: all
+- pool:
+- ${{ parameters.pool }}
+- container: ${{ parameters.container }}
+- strategy:
+- matrix:
+- ${{ each framework in parameters.frameworks }}:
+- ${{ framework }}:
+- _Framework: ${{ framework }}
+- steps:
+- - checkout: self
+- clean: true
+- # Run all of the steps to setup repo
+- - ${{ each step in parameters.steps }}:
+- - ${{ step }}
+- - powershell: $(Build.SourcesDirectory)\eng\common\performance\performance-setup.ps1 $(IsInternal) -Framework $(_Framework) ${{ parameters.extraSetupParameters }}
+- displayName: Performance Setup (Windows)
+- condition: and(succeeded(), eq(variables['Agent.Os'], 'Windows_NT'))
+- continueOnError: ${{ parameters.continueOnError }}
+- - script: $(Build.SourcesDirectory)/eng/common/performance/performance-setup.sh $(IsInternal) --framework $(_Framework) ${{ parameters.extraSetupParameters }}
+- displayName: Performance Setup (Unix)
+- condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT'))
+- continueOnError: ${{ parameters.continueOnError }}
+- - script: $(Python) $(PerformanceDirectory)/scripts/ci_setup.py $(SetupArguments)
+- displayName: Run ci setup script
+- # Run perf testing in helix
+- - template: /eng/common/templates/steps/perf-send-to-helix.yml
+- parameters:
+- HelixSource: '$(HelixSourcePrefix)/$(Build.Repository.Name)/$(Build.SourceBranch)' # sources must start with pr/, official/, prodcon/, or agent/
+- HelixType: 'test/performance/$(Kind)/$(_Framework)/$(Architecture)'
+- HelixAccessToken: $(HelixApiAccessToken)
+- HelixTargetQueues: $(Queue)
+- HelixPreCommands: $(HelixPreCommand)
+- Creator: $(Creator)
+- WorkItemTimeout: 4:00 # 4 hours
+- WorkItemDirectory: '$(WorkItemDirectory)' # WorkItemDirectory can not be empty, so we send it some docs to keep it happy
+- CorrelationPayloadDirectory: '$(PayloadDirectory)' # it gets checked out to a folder with shorter path than WorkItemDirectory so we can avoid file name too long exceptions
+\ No newline at end of file
+diff --git a/eng/common/templates/job/publish-build-assets.yml b/eng/common/templates/job/publish-build-assets.yml
+index d0c3cc2..3b9e252 100644
+--- a/eng/common/templates/job/publish-build-assets.yml
++++ b/eng/common/templates/job/publish-build-assets.yml
+@@ -37,6 +37,7 @@ jobs:
+ - name: _BuildConfig
+ value: ${{ parameters.configuration }}
+ - group: Publish-Build-Assets
++ - group: AzureDevOps-Artifact-Feeds-Pats
+ # Skip component governance and codesign validation for SDL. These jobs
+ # create no content.
+ - name: skipComponentGovernanceDetection
+@@ -51,12 +52,19 @@ jobs:
+ inputs:
+ artifactName: AssetManifests
+ downloadPath: '$(Build.StagingDirectory)/Download'
++ checkDownloadedFiles: true
+ condition: ${{ parameters.condition }}
+ continueOnError: ${{ parameters.continueOnError }}
+
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - task: NuGetAuthenticate@0
+
++ - task: PowerShell@2
++ displayName: Enable cross-org NuGet feed authentication
++ inputs:
++ filePath: $(Build.SourcesDirectory)/eng/common/enable-cross-org-publishing.ps1
++ arguments: -token $(dn-bot-all-orgs-artifact-feeds-rw)
++
+ - task: PowerShell@2
+ displayName: Publish Build Assets
+ inputs:
+diff --git a/eng/common/templates/job/source-build.yml b/eng/common/templates/job/source-build.yml
+index 9332f5e..5023d36 100644
+--- a/eng/common/templates/job/source-build.yml
++++ b/eng/common/templates/job/source-build.yml
+@@ -15,6 +15,9 @@ parameters:
+ # nonPortable: false
+ # Enables non-portable mode. This means a more specific RID (e.g. fedora.32-x64 rather than
+ # linux-x64), and compiling against distro-provided packages rather than portable ones.
++ # skipPublishValidation: false
++ # Disables publishing validation. By default, a check is performed to ensure no packages are
++ # published by source-build.
+ # container: ''
+ # A container to use. Runs in docker.
+ # pool: {}
+@@ -28,6 +31,11 @@ parameters:
+ # container and pool.
+ platform: {}
+
++ # The default VM host AzDO pool. This should be capable of running Docker containers: almost all
++ # source-build builds run in Docker, including the default managed platform.
++ defaultContainerHostPool:
++ vmImage: ubuntu-20.04
++
+ jobs:
+ - job: ${{ parameters.jobNamePrefix }}_${{ parameters.platform.name }}
+ displayName: Source-Build (${{ parameters.platform.name }})
+@@ -37,6 +45,9 @@ jobs:
+
+ ${{ if ne(parameters.platform.container, '') }}:
+ container: ${{ parameters.platform.container }}
++
++ ${{ if eq(parameters.platform.pool, '') }}:
++ pool: ${{ parameters.defaultContainerHostPool }}
+ ${{ if ne(parameters.platform.pool, '') }}:
+ pool: ${{ parameters.platform.pool }}
+
+diff --git a/eng/common/templates/job/source-index-stage1.yml b/eng/common/templates/job/source-index-stage1.yml
+new file mode 100644
+index 0000000..b58d423
+--- /dev/null
++++ b/eng/common/templates/job/source-index-stage1.yml
+@@ -0,0 +1,62 @@
++parameters:
++ runAsPublic: false
++ sourceIndexPackageVersion: 1.0.1-20210614.1
++ sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json
++ sourceIndexBuildCommand: powershell -NoLogo -NoProfile -ExecutionPolicy Bypass -Command "eng/common/build.ps1 -restore -build -binarylog -ci"
++ preSteps: []
++ binlogPath: artifacts/log/Debug/Build.binlog
++ pool:
++ vmImage: vs2017-win2016
++ condition: ''
++ dependsOn: ''
++
++jobs:
++- job: SourceIndexStage1
++ dependsOn: ${{ parameters.dependsOn }}
++ condition: ${{ parameters.condition }}
++ variables:
++ - name: SourceIndexPackageVersion
++ value: ${{ parameters.sourceIndexPackageVersion }}
++ - name: SourceIndexPackageSource
++ value: ${{ parameters.sourceIndexPackageSource }}
++ - name: BinlogPath
++ value: ${{ parameters.binlogPath }}
++ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
++ - group: source-dot-net stage1 variables
++
++ pool: ${{ parameters.pool }}
++ steps:
++ - ${{ each preStep in parameters.preSteps }}:
++ - ${{ preStep }}
++
++ - task: UseDotNet@2
++ displayName: Use .NET Core sdk 3.1
++ inputs:
++ packageType: sdk
++ version: 3.1.x
++
++ - task: UseDotNet@2
++ displayName: Use .NET Core sdk
++ inputs:
++ useGlobalJson: true
++
++ - script: |
++ dotnet tool install BinLogToSln --version $(SourceIndexPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path .source-index/tools
++ dotnet tool install UploadIndexStage1 --version $(SourceIndexPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path .source-index/tools
++ echo ##vso[task.prependpath]$(Build.SourcesDirectory)/.source-index/tools
++ displayName: Download Tools
++
++ - script: ${{ parameters.sourceIndexBuildCommand }}
++ displayName: Build Repository
++
++ - script: BinLogToSln -i $(BinlogPath) -r $(Build.SourcesDirectory) -n $(Build.Repository.Name) -o .source-index/stage1output
++ displayName: Process Binlog into indexable sln
++ env:
++ DOTNET_ROLL_FORWARD_ON_NO_CANDIDATE_FX: 2
++
++ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
++ - script: UploadIndexStage1 -i .source-index/stage1output -n $(Build.Repository.Name)
++ displayName: Upload stage1 artifacts to source index
++ env:
++ BLOB_CONTAINER_URL: $(source-dot-net-stage1-blob-container-url)
++ DOTNET_ROLL_FORWARD_ON_NO_CANDIDATE_FX: 2
+diff --git a/eng/common/templates/jobs/jobs.yml b/eng/common/templates/jobs/jobs.yml
+index 0884595..a1f8fce 100644
+--- a/eng/common/templates/jobs/jobs.yml
++++ b/eng/common/templates/jobs/jobs.yml
+@@ -7,7 +7,14 @@ parameters:
+
+ # Optional: Enable publishing using release pipelines
+ enablePublishUsingPipelines: false
+-
++
++ # Optional: Enable running the source-build jobs to build repo from source
++ enableSourceBuild: false
++
++ # Optional: Parameters for source-build template.
++ # See /eng/common/templates/jobs/source-build.yml for options
++ sourceBuildParameters: []
++
+ graphFileGeneration:
+ # Optional: Enable generating the graph files at the end of the build
+ enabled: false
+@@ -24,12 +31,8 @@ parameters:
+ # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
+ runAsPublic: false
+
+- # Optional: Enable running the source-build jobs to build repo from source
+- runSourceBuild: false
+-
+- # Optional: Parameters for source-build template.
+- # See /eng/common/templates/jobs/source-build.yml for options
+- sourceBuildParameters: []
++ enableSourceIndex: false
++ sourceIndexParams: {}
+
+ # Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
+ # and some (Microbuild) should only be applied to non-PR cases for internal builds.
+@@ -50,14 +53,22 @@ jobs:
+
+ name: ${{ job.job }}
+
+-- ${{ if eq(parameters.runSourceBuild, true) }}:
++- ${{ if eq(parameters.enableSourceBuild, true) }}:
+ - template: /eng/common/templates/jobs/source-build.yml
+ parameters:
+ allCompletedJobId: Source_Build_Complete
+ ${{ each parameter in parameters.sourceBuildParameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
+
++- ${{ if eq(parameters.enableSourceIndex, 'true') }}:
++ - template: ../job/source-index-stage1.yml
++ parameters:
++ runAsPublic: ${{ parameters.runAsPublic }}
++ ${{ each parameter in parameters.sourceIndexParams }}:
++ ${{ parameter.key }}: ${{ parameter.value }}
++
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
++
+ - ${{ if or(eq(parameters.enablePublishBuildAssets, true), eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, '')) }}:
+ - template: ../job/publish-build-assets.yml
+ parameters:
+@@ -69,7 +80,7 @@ jobs:
+ - ${{ if eq(parameters.publishBuildAssetsDependsOn, '') }}:
+ - ${{ each job in parameters.jobs }}:
+ - ${{ job.job }}
+- - ${{ if eq(parameters.runSourceBuild, true) }}:
++ - ${{ if eq(parameters.enableSourceBuild, true) }}:
+ - Source_Build_Complete
+ pool:
+ vmImage: vs2017-win2016
+diff --git a/eng/common/templates/jobs/source-build.yml b/eng/common/templates/jobs/source-build.yml
+index f463011..00aa98e 100644
+--- a/eng/common/templates/jobs/source-build.yml
++++ b/eng/common/templates/jobs/source-build.yml
+@@ -11,16 +11,14 @@ parameters:
+ # See /eng/common/templates/job/source-build.yml
+ jobNamePrefix: 'Source_Build'
+
+- # If changed to true, causes this template to include the default platform for a managed-only
+- # repo. The exact Docker image used for this build will be provided by Arcade. This has some risk,
+- # but since the repo is supposed to be managed-only, the risk should be very low.
+- includeDefaultManagedPlatform: false
++ # This is the default platform provided by Arcade, intended for use by a managed-only repo.
+ defaultManagedPlatform:
+ name: 'Managed'
+ container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-7-3e800f1-20190501005343'
+
+ # Defines the platforms on which to run build jobs. One job is created for each platform, and the
+- # object in this array is sent to the job template as 'platform'.
++ # object in this array is sent to the job template as 'platform'. If no platforms are specified,
++ # one job runs on 'defaultManagedPlatform'.
+ platforms: []
+
+ jobs:
+@@ -32,7 +30,7 @@ jobs:
+ dependsOn:
+ - ${{ each platform in parameters.platforms }}:
+ - ${{ parameters.jobNamePrefix }}_${{ platform.name }}
+- - ${{ if eq(parameters.includeDefaultManagedPlatform, true) }}:
++ - ${{ if eq(length(parameters.platforms), 0) }}:
+ - ${{ parameters.jobNamePrefix }}_${{ parameters.defaultManagedPlatform.name }}
+
+ - ${{ each platform in parameters.platforms }}:
+@@ -41,7 +39,7 @@ jobs:
+ jobNamePrefix: ${{ parameters.jobNamePrefix }}
+ platform: ${{ platform }}
+
+-- ${{ if eq(parameters.includeDefaultManagedPlatform, true) }}:
++- ${{ if eq(length(parameters.platforms), 0) }}:
+ - template: /eng/common/templates/job/source-build.yml
+ parameters:
+ jobNamePrefix: ${{ parameters.jobNamePrefix }}
+diff --git a/eng/common/templates/phases/publish-build-assets.yml b/eng/common/templates/phases/publish-build-assets.yml
+index a0a8074..4e51e47 100644
+--- a/eng/common/templates/phases/publish-build-assets.yml
++++ b/eng/common/templates/phases/publish-build-assets.yml
+@@ -20,6 +20,7 @@ phases:
+ inputs:
+ artifactName: AssetManifests
+ downloadPath: '$(Build.StagingDirectory)/Download'
++ checkDownloadedFiles: true
+ condition: ${{ parameters.condition }}
+ continueOnError: ${{ parameters.continueOnError }}
+ - task: AzureKeyVault@1
+diff --git a/eng/common/templates/post-build/channels/generic-internal-channel.yml b/eng/common/templates/post-build/channels/generic-internal-channel.yml
+index 7ae5255..8990dfc 100644
+--- a/eng/common/templates/post-build/channels/generic-internal-channel.yml
++++ b/eng/common/templates/post-build/channels/generic-internal-channel.yml
+@@ -40,6 +40,9 @@ stages:
+ pool:
+ vmImage: 'windows-2019'
+ steps:
++ - script: echo "##vso[task.logissue type=warning]Going forward, v2 Arcade publishing is no longer supported. Please read https://github.com/dotnet/arcade/blob/main/Documentation/CorePackages/Publishing.md for details, then contact dnceng if you have further questions."
++ displayName: Warn about v2 Arcade Publishing Usage
++
+ # This is necessary whenever we want to publish/restore to an AzDO private feed
+ - task: NuGetAuthenticate@0
+ displayName: 'Authenticate to AzDO Feeds'
+@@ -58,6 +61,7 @@ stages:
+ PdbArtifacts/**
+ BlobArtifacts/**
+ downloadPath: '$(Build.ArtifactStagingDirectory)'
++ checkDownloadedFiles: true
+
+ # This is necessary whenever we want to publish/restore to an AzDO private feed
+ # Since sdk-task.ps1 tries to restore packages we need to do this authentication here
+@@ -109,6 +113,9 @@ stages:
+ pool:
+ vmImage: 'windows-2019'
+ steps:
++ - script: echo "##vso[task.logissue type=warning]Going forward, v2 Arcade publishing is no longer supported. Please read https://github.com/dotnet/arcade/blob/main/Documentation/CorePackages/Publishing.md for details, then contact dnceng if you have further questions."
++ displayName: Warn about v2 Arcade Publishing Usage
++
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Build Assets
+ continueOnError: true
+@@ -124,6 +131,7 @@ stages:
+ BlobArtifacts/**
+ AssetManifests/**
+ downloadPath: '$(Build.ArtifactStagingDirectory)'
++ checkDownloadedFiles: true
+
+ - task: NuGetToolInstaller@1
+ displayName: 'Install NuGet.exe'
+diff --git a/eng/common/templates/post-build/channels/generic-public-channel.yml b/eng/common/templates/post-build/channels/generic-public-channel.yml
+index 6cf39db..3220c6a 100644
+--- a/eng/common/templates/post-build/channels/generic-public-channel.yml
++++ b/eng/common/templates/post-build/channels/generic-public-channel.yml
+@@ -42,6 +42,9 @@ stages:
+ pool:
+ vmImage: 'windows-2019'
+ steps:
++ - script: echo "##vso[task.logissue type=warning]Going forward, v2 Arcade publishing is no longer supported. Please read https://github.com/dotnet/arcade/blob/main/Documentation/CorePackages/Publishing.md for details, then contact dnceng if you have further questions."
++ displayName: Warn about v2 Arcade Publishing Usage
++
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Build Assets
+ continueOnError: true
+@@ -56,6 +59,7 @@ stages:
+ PdbArtifacts/**
+ BlobArtifacts/**
+ downloadPath: '$(Build.ArtifactStagingDirectory)'
++ checkDownloadedFiles: true
+
+ # This is necessary whenever we want to publish/restore to an AzDO private feed
+ # Since sdk-task.ps1 tries to restore packages we need to do this authentication here
+@@ -108,6 +112,9 @@ stages:
+ pool:
+ vmImage: 'windows-2019'
+ steps:
++ - script: echo "##vso[task.logissue type=warning]Going forward, v2 Arcade publishing is no longer supported. Please read https://github.com/dotnet/arcade/blob/main/Documentation/CorePackages/Publishing.md for details, then contact dnceng if you have further questions."
++ displayName: Warn about v2 Arcade Publishing Usage
++
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Build Assets
+ continueOnError: true
+@@ -123,6 +130,7 @@ stages:
+ BlobArtifacts/**
+ AssetManifests/**
+ downloadPath: '$(Build.ArtifactStagingDirectory)'
++ checkDownloadedFiles: true
+
+ - task: NuGetToolInstaller@1
+ displayName: 'Install NuGet.exe'
+diff --git a/eng/common/templates/post-build/post-build.yml b/eng/common/templates/post-build/post-build.yml
+index df06f53..4f79cf0 100644
+--- a/eng/common/templates/post-build/post-build.yml
++++ b/eng/common/templates/post-build/post-build.yml
+@@ -32,7 +32,6 @@ parameters:
+ symbolPublishingAdditionalParameters: ''
+ artifactsPublishingAdditionalParameters: ''
+ signingValidationAdditionalParameters: ''
+- useBuildManifest: false
+
+ # Which stages should finish execution before post-build stages start
+ validateDependsOn:
+@@ -54,9 +53,6 @@ parameters:
+ NETCoreExperimentalChannelId: 562
+ NetEngServicesIntChannelId: 678
+ NetEngServicesProdChannelId: 679
+- Net5Preview8ChannelId: 1155
+- Net5RC1ChannelId: 1157
+- Net5RC2ChannelId: 1329
+ NetCoreSDK313xxChannelId: 759
+ NetCoreSDK313xxInternalChannelId: 760
+ NetCoreSDK314xxChannelId: 921
+@@ -65,177 +61,180 @@ parameters:
+ VS167ChannelId: 1011
+ VS168ChannelId: 1154
+ VSMasterChannelId: 1012
+-
+-stages:
+-- stage: Validate
+- dependsOn: ${{ parameters.validateDependsOn }}
+- displayName: Validate Build Assets
+- variables:
+- - template: common-variables.yml
+- jobs:
+- - template: setup-maestro-vars.yml
+- parameters:
+- BARBuildId: ${{ parameters.BARBuildId }}
+- PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
++ VS169ChannelId: 1473
++ VS1610ChannelId: 1692
+
+- - job:
+- displayName: Post-build Checks
+- dependsOn: setupMaestroVars
+- variables:
+- - name: TargetChannels
+- value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.TargetChannels'] ]
+- pool:
+- vmImage: 'windows-2019'
+- steps:
+- - task: PowerShell@2
+- displayName: Maestro Channels Consistency
+- inputs:
+- filePath: $(Build.SourcesDirectory)/eng/common/post-build/check-channel-consistency.ps1
+- arguments: -PromoteToChannels "$(TargetChannels)"
+- -AvailableChannelIds ${{parameters.NetEngLatestChannelId}},${{parameters.NetEngValidationChannelId}},${{parameters.NetDev5ChannelId}},${{parameters.NetDev6ChannelId}},${{parameters.GeneralTestingChannelId}},${{parameters.NETCoreToolingDevChannelId}},${{parameters.NETCoreToolingReleaseChannelId}},${{parameters.NETInternalToolingChannelId}},${{parameters.NETCoreExperimentalChannelId}},${{parameters.NetEngServicesIntChannelId}},${{parameters.NetEngServicesProdChannelId}},${{parameters.Net5Preview8ChannelId}},${{parameters.Net5RC1ChannelId}},${{parameters.Net5RC2ChannelId}},${{parameters.NetCoreSDK313xxChannelId}},${{parameters.NetCoreSDK313xxInternalChannelId}},${{parameters.NetCoreSDK314xxChannelId}},${{parameters.NetCoreSDK314xxInternalChannelId}},${{parameters.VS166ChannelId}},${{parameters.VS167ChannelId}},${{parameters.VS168ChannelId}},${{parameters.VSMasterChannelId}}
+-
+- - job:
+- displayName: NuGet Validation
+- dependsOn: setupMaestroVars
+- condition: eq( ${{ parameters.enableNugetValidation }}, 'true')
+- pool:
+- vmImage: 'windows-2019'
+- variables:
+- - name: AzDOProjectName
+- value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOProjectName'] ]
+- - name: AzDOPipelineId
+- value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOPipelineId'] ]
+- - name: AzDOBuildId
+- value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOBuildId'] ]
+- steps:
+- - task: DownloadBuildArtifacts@0
+- displayName: Download Package Artifacts
+- inputs:
+- buildType: specific
+- buildVersionToDownload: specific
+- project: $(AzDOProjectName)
+- pipeline: $(AzDOPipelineId)
+- buildId: $(AzDOBuildId)
+- artifactName: PackageArtifacts
+-
+- - task: PowerShell@2
+- displayName: Validate
+- inputs:
+- filePath: $(Build.SourcesDirectory)/eng/common/post-build/nuget-validation.ps1
+- arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/
+- -ToolDestinationPath $(Agent.BuildDirectory)/Extract/
+-
+- - job:
+- displayName: Signing Validation
+- dependsOn: setupMaestroVars
+- condition: eq( ${{ parameters.enableSigningValidation }}, 'true')
++stages:
++- ${{ if or(and(le(parameters.publishingInfraVersion, 2), eq(parameters.inline, 'true')), eq( parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}:
++ - stage: Validate
++ dependsOn: ${{ parameters.validateDependsOn }}
++ displayName: Validate Build Assets
+ variables:
+ - template: common-variables.yml
+- - name: AzDOProjectName
+- value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOProjectName'] ]
+- - name: AzDOPipelineId
+- value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOPipelineId'] ]
+- - name: AzDOBuildId
+- value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOBuildId'] ]
+- pool:
+- vmImage: 'windows-2019'
+- steps:
+- - ${{ if eq(parameters.useBuildManifest, true) }}:
++ jobs:
++ - template: setup-maestro-vars.yml
++ parameters:
++ BARBuildId: ${{ parameters.BARBuildId }}
++ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
++
++ - ${{ if and(le(parameters.publishingInfraVersion, 2), eq(parameters.inline, 'true')) }}:
++ - job:
++ displayName: Post-build Checks
++ dependsOn: setupMaestroVars
++ variables:
++ - name: TargetChannels
++ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.TargetChannels'] ]
++ pool:
++ vmImage: 'windows-2019'
++ steps:
++ - task: PowerShell@2
++ displayName: Maestro Channels Consistency
++ inputs:
++ filePath: $(Build.SourcesDirectory)/eng/common/post-build/check-channel-consistency.ps1
++ arguments: -PromoteToChannels "$(TargetChannels)"
++ -AvailableChannelIds ${{parameters.NetEngLatestChannelId}},${{parameters.NetEngValidationChannelId}},${{parameters.NetDev5ChannelId}},${{parameters.NetDev6ChannelId}},${{parameters.GeneralTestingChannelId}},${{parameters.NETCoreToolingDevChannelId}},${{parameters.NETCoreToolingReleaseChannelId}},${{parameters.NETInternalToolingChannelId}},${{parameters.NETCoreExperimentalChannelId}},${{parameters.NetEngServicesIntChannelId}},${{parameters.NetEngServicesProdChannelId}},${{parameters.NetCoreSDK313xxChannelId}},${{parameters.NetCoreSDK313xxInternalChannelId}},${{parameters.NetCoreSDK314xxChannelId}},${{parameters.NetCoreSDK314xxInternalChannelId}},${{parameters.VS166ChannelId}},${{parameters.VS167ChannelId}},${{parameters.VS168ChannelId}},${{parameters.VSMasterChannelId}},${{parameters.VS169ChannelId}},${{parameters.VS1610ChannelId}}
++
++ - job:
++ displayName: NuGet Validation
++ dependsOn: setupMaestroVars
++ condition: eq( ${{ parameters.enableNugetValidation }}, 'true')
++ pool:
++ vmImage: 'windows-2019'
++ variables:
++ - name: AzDOProjectName
++ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOProjectName'] ]
++ - name: AzDOPipelineId
++ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOPipelineId'] ]
++ - name: AzDOBuildId
++ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOBuildId'] ]
++ steps:
+ - task: DownloadBuildArtifacts@0
+- displayName: Download build manifest
++ displayName: Download Package Artifacts
+ inputs:
+ buildType: specific
+ buildVersionToDownload: specific
+ project: $(AzDOProjectName)
+ pipeline: $(AzDOPipelineId)
+ buildId: $(AzDOBuildId)
+- artifactName: BuildManifests
+- - task: DownloadBuildArtifacts@0
+- displayName: Download Package Artifacts
+- inputs:
+- buildType: specific
+- buildVersionToDownload: specific
+- project: $(AzDOProjectName)
+- pipeline: $(AzDOPipelineId)
+- buildId: $(AzDOBuildId)
+- artifactName: PackageArtifacts
+-
+- # This is necessary whenever we want to publish/restore to an AzDO private feed
+- # Since sdk-task.ps1 tries to restore packages we need to do this authentication here
+- # otherwise it'll complain about accessing a private feed.
+- - task: NuGetAuthenticate@0
+- displayName: 'Authenticate to AzDO Feeds'
+-
+- - task: PowerShell@2
+- displayName: Enable cross-org publishing
+- inputs:
+- filePath: eng\common\enable-cross-org-publishing.ps1
+- arguments: -token $(dn-bot-dnceng-artifact-feeds-rw)
+-
+- # Signing validation will optionally work with the buildmanifest file which is downloaded from
+- # Azure DevOps above.
+- - task: PowerShell@2
+- displayName: Validate
+- inputs:
+- filePath: eng\common\sdk-task.ps1
+- arguments: -task SigningValidation -restore -msbuildEngine vs
+- /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts'
+- /p:SignCheckExclusionsFile='$(Build.SourcesDirectory)/eng/SignCheckExclusionsFile.txt'
+- ${{ parameters.signingValidationAdditionalParameters }}
+-
+- - template: ../steps/publish-logs.yml
+- parameters:
+- StageLabel: 'Validation'
+- JobLabel: 'Signing'
+-
+- - job:
+- displayName: SourceLink Validation
+- dependsOn: setupMaestroVars
+- condition: eq( ${{ parameters.enableSourceLinkValidation }}, 'true')
+- variables:
+- - template: common-variables.yml
+- - name: AzDOProjectName
+- value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOProjectName'] ]
+- - name: AzDOPipelineId
+- value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOPipelineId'] ]
+- - name: AzDOBuildId
+- value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOBuildId'] ]
+- pool:
+- vmImage: 'windows-2019'
+- steps:
+- - task: DownloadBuildArtifacts@0
+- displayName: Download Blob Artifacts
+- inputs:
+- buildType: specific
+- buildVersionToDownload: specific
+- project: $(AzDOProjectName)
+- pipeline: $(AzDOPipelineId)
+- buildId: $(AzDOBuildId)
+- artifactName: BlobArtifacts
+-
+- - task: PowerShell@2
+- displayName: Validate
+- inputs:
+- filePath: $(Build.SourcesDirectory)/eng/common/post-build/sourcelink-validation.ps1
+- arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/
+- -ExtractPath $(Agent.BuildDirectory)/Extract/
+- -GHRepoName $(Build.Repository.Name)
+- -GHCommit $(Build.SourceVersion)
+- -SourcelinkCliVersion $(SourceLinkCLIVersion)
+- continueOnError: true
+-
+- - template: /eng/common/templates/job/execute-sdl.yml
+- parameters:
+- enable: ${{ parameters.SDLValidationParameters.enable }}
++ artifactName: PackageArtifacts
++ checkDownloadedFiles: true
++
++ - task: PowerShell@2
++ displayName: Validate
++ inputs:
++ filePath: $(Build.SourcesDirectory)/eng/common/post-build/nuget-validation.ps1
++ arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/
++ -ToolDestinationPath $(Agent.BuildDirectory)/Extract/
++
++ - job:
++ displayName: Signing Validation
+ dependsOn: setupMaestroVars
+- additionalParameters: ${{ parameters.SDLValidationParameters.params }}
+- continueOnError: ${{ parameters.SDLValidationParameters.continueOnError }}
+- artifactNames: ${{ parameters.SDLValidationParameters.artifactNames }}
+- downloadArtifacts: ${{ parameters.SDLValidationParameters.downloadArtifacts }}
++ condition: and( eq( ${{ parameters.enableSigningValidation }}, 'true'), ne( variables['PostBuildSign'], 'true'))
++ variables:
++ - template: common-variables.yml
++ - name: AzDOProjectName
++ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOProjectName'] ]
++ - name: AzDOPipelineId
++ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOPipelineId'] ]
++ - name: AzDOBuildId
++ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOBuildId'] ]
++ pool:
++ vmImage: 'windows-2019'
++ steps:
++ - task: DownloadBuildArtifacts@0
++ displayName: Download Package Artifacts
++ inputs:
++ buildType: specific
++ buildVersionToDownload: specific
++ project: $(AzDOProjectName)
++ pipeline: $(AzDOPipelineId)
++ buildId: $(AzDOBuildId)
++ artifactName: PackageArtifacts
++ checkDownloadedFiles: true
++ itemPattern: |
++ **
++ !**/Microsoft.SourceBuild.Intermediate.*.nupkg
++
++ # This is necessary whenever we want to publish/restore to an AzDO private feed
++ # Since sdk-task.ps1 tries to restore packages we need to do this authentication here
++ # otherwise it'll complain about accessing a private feed.
++ - task: NuGetAuthenticate@0
++ displayName: 'Authenticate to AzDO Feeds'
++
++ - task: PowerShell@2
++ displayName: Enable cross-org publishing
++ inputs:
++ filePath: eng\common\enable-cross-org-publishing.ps1
++ arguments: -token $(dn-bot-dnceng-artifact-feeds-rw)
++
++ # Signing validation will optionally work with the buildmanifest file which is downloaded from
++ # Azure DevOps above.
++ - task: PowerShell@2
++ displayName: Validate
++ inputs:
++ filePath: eng\common\sdk-task.ps1
++ arguments: -task SigningValidation -restore -msbuildEngine vs
++ /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts'
++ /p:SignCheckExclusionsFile='$(Build.SourcesDirectory)/eng/SignCheckExclusionsFile.txt'
++ ${{ parameters.signingValidationAdditionalParameters }}
++
++ - template: ../steps/publish-logs.yml
++ parameters:
++ StageLabel: 'Validation'
++ JobLabel: 'Signing'
++
++ - job:
++ displayName: SourceLink Validation
++ dependsOn: setupMaestroVars
++ condition: eq( ${{ parameters.enableSourceLinkValidation }}, 'true')
++ variables:
++ - template: common-variables.yml
++ - name: AzDOProjectName
++ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOProjectName'] ]
++ - name: AzDOPipelineId
++ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOPipelineId'] ]
++ - name: AzDOBuildId
++ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOBuildId'] ]
++ pool:
++ vmImage: 'windows-2019'
++ steps:
++ - task: DownloadBuildArtifacts@0
++ displayName: Download Blob Artifacts
++ inputs:
++ buildType: specific
++ buildVersionToDownload: specific
++ project: $(AzDOProjectName)
++ pipeline: $(AzDOPipelineId)
++ buildId: $(AzDOBuildId)
++ artifactName: BlobArtifacts
++ checkDownloadedFiles: true
++
++ - task: PowerShell@2
++ displayName: Validate
++ inputs:
++ filePath: $(Build.SourcesDirectory)/eng/common/post-build/sourcelink-validation.ps1
++ arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/
++ -ExtractPath $(Agent.BuildDirectory)/Extract/
++ -GHRepoName $(Build.Repository.Name)
++ -GHCommit $(Build.SourceVersion)
++ -SourcelinkCliVersion $(SourceLinkCLIVersion)
++ continueOnError: true
++
++ - template: /eng/common/templates/job/execute-sdl.yml
++ parameters:
++ enable: ${{ parameters.SDLValidationParameters.enable }}
++ dependsOn: setupMaestroVars
++ additionalParameters: ${{ parameters.SDLValidationParameters.params }}
++ continueOnError: ${{ parameters.SDLValidationParameters.continueOnError }}
++ artifactNames: ${{ parameters.SDLValidationParameters.artifactNames }}
++ downloadArtifacts: ${{ parameters.SDLValidationParameters.downloadArtifacts }}
+
+ - ${{ if or(ge(parameters.publishingInfraVersion, 3), eq(parameters.inline, 'false')) }}:
+ - stage: publish_using_darc
+- dependsOn: Validate
++ ${{ if or(eq(parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}:
++ dependsOn: ${{ parameters.publishDependsOn }}
++ ${{ if and(ne(parameters.enableNugetValidation, 'true'), ne(parameters.enableSigningValidation, 'true'), ne(parameters.enableSourceLinkValidation, 'true'), ne(parameters.SDLValidationParameters.enable, 'true')) }}:
++ dependsOn: ${{ parameters.validateDependsOn }}
+ displayName: Publish using Darc
+ variables:
+ - template: common-variables.yml
+@@ -248,6 +247,7 @@ stages:
+ - job:
+ displayName: Publish Using Darc
+ dependsOn: setupMaestroVars
++ timeoutInMinutes: 120
+ variables:
+ - name: BARBuildId
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.BARBuildId'] ]
+@@ -264,6 +264,8 @@ stages:
+ -MaestroToken '$(MaestroApiAccessToken)'
+ -WaitPublishingFinish ${{ parameters.waitPublishingFinish }}
+ -PublishInstallersAndChecksums ${{ parameters.publishInstallersAndChecksums }}
++ -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}'
++ -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}'
+
+ - ${{ if and(le(parameters.publishingInfraVersion, 2), eq(parameters.inline, 'true')) }}:
+ - template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+@@ -298,54 +300,6 @@ stages:
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet6/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet6-symbols/nuget/v3/index.json'
+
+- - template: \eng\common\templates\post-build\channels\generic-internal-channel.yml
+- parameters:
+- BARBuildId: ${{ parameters.BARBuildId }}
+- PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+- artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+- dependsOn: ${{ parameters.publishDependsOn }}
+- publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+- symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+- stageName: 'Net5_Preview8_Publish'
+- channelName: '.NET 5 Preview 8'
+- akaMSChannelName: 'net5/preview8'
+- channelId: ${{ parameters.Net5Preview8ChannelId }}
+- transportFeed: 'https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet5-internal-transport/nuget/v3/index.json'
+- shippingFeed: 'https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet5-internal/nuget/v3/index.json'
+- symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet5-internal-symbols/nuget/v3/index.json'
+-
+- - template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+- parameters:
+- BARBuildId: ${{ parameters.BARBuildId }}
+- PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+- artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+- dependsOn: ${{ parameters.publishDependsOn }}
+- publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+- symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+- stageName: 'Net5_RC1_Publish'
+- channelName: '.NET 5 RC 1'
+- akaMSChannelName: 'net5/rc1'
+- channelId: ${{ parameters.Net5RC1ChannelId }}
+- transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet5-transport/nuget/v3/index.json'
+- shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet5/nuget/v3/index.json'
+- symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet5-symbols/nuget/v3/index.json'
+-
+- - template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+- parameters:
+- BARBuildId: ${{ parameters.BARBuildId }}
+- PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+- artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+- dependsOn: ${{ parameters.publishDependsOn }}
+- publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+- symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+- stageName: 'Net5_RC2_Publish'
+- channelName: '.NET 5 RC 2'
+- akaMSChannelName: 'net5/rc2'
+- channelId: ${{ parameters.Net5RC2ChannelId }}
+- transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet5-transport/nuget/v3/index.json'
+- shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet5/nuget/v3/index.json'
+- symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet5-symbols/nuget/v3/index.json'
+-
+ - template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+@@ -603,3 +557,33 @@ stages:
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-transport/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-symbols/nuget/v3/index.json'
++
++ - template: \eng\common\templates\post-build\channels\generic-public-channel.yml
++ parameters:
++ BARBuildId: ${{ parameters.BARBuildId }}
++ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
++ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
++ dependsOn: ${{ parameters.publishDependsOn }}
++ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
++ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
++ stageName: 'VS_16_9_Publishing'
++ channelName: 'VS 16.9'
++ channelId: ${{ parameters.VS169ChannelId }}
++ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-transport/nuget/v3/index.json'
++ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json'
++ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-symbols/nuget/v3/index.json'
++
++ - template: \eng\common\templates\post-build\channels\generic-public-channel.yml
++ parameters:
++ BARBuildId: ${{ parameters.BARBuildId }}
++ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
++ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
++ dependsOn: ${{ parameters.publishDependsOn }}
++ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
++ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
++ stageName: 'VS_16_10_Publishing'
++ channelName: 'VS 16.10'
++ channelId: ${{ parameters.VS1610ChannelId }}
++ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-transport/nuget/v3/index.json'
++ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json'
++ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-symbols/nuget/v3/index.json'
+diff --git a/eng/common/templates/post-build/setup-maestro-vars.yml b/eng/common/templates/post-build/setup-maestro-vars.yml
+index d0cbfb6..4a22b2e 100644
+--- a/eng/common/templates/post-build/setup-maestro-vars.yml
++++ b/eng/common/templates/post-build/setup-maestro-vars.yml
+@@ -18,6 +18,7 @@ jobs:
+ inputs:
+ buildType: current
+ artifactName: ReleaseConfigs
++ checkDownloadedFiles: true
+
+ - task: PowerShell@2
+ name: setReleaseVars
+diff --git a/eng/common/templates/steps/perf-send-to-helix.yml b/eng/common/templates/steps/perf-send-to-helix.yml
+deleted file mode 100644
+index 8427de5..0000000
+--- a/eng/common/templates/steps/perf-send-to-helix.yml
++++ /dev/null
+@@ -1,50 +0,0 @@
+-# Please remember to update the documentation if you make changes to these parameters!
+-parameters:
+- ProjectFile: '' # required -- project file that specifies the helix workitems
+- HelixSource: 'pr/default' # required -- sources must start with pr/, official/, prodcon/, or agent/
+- HelixType: 'tests/default/' # required -- Helix telemetry which identifies what type of data this is; should include "test" for clarity and must end in '/'
+- HelixBuild: $(Build.BuildNumber) # required -- the build number Helix will use to identify this -- automatically set to the AzDO build number
+- HelixTargetQueues: '' # required -- semicolon delimited list of Helix queues to test on; see https://helix.dot.net/ for a list of queues
+- HelixAccessToken: '' # required -- access token to make Helix API requests; should be provided by the appropriate variable group
+- HelixPreCommands: '' # optional -- commands to run before Helix work item execution
+- HelixPostCommands: '' # optional -- commands to run after Helix work item execution
+- WorkItemDirectory: '' # optional -- a payload directory to zip up and send to Helix; requires WorkItemCommand; incompatible with XUnitProjects
+- CorrelationPayloadDirectory: '' # optional -- a directory to zip up and send to Helix as a correlation payload
+- IncludeDotNetCli: false # optional -- true will download a version of the .NET CLI onto the Helix machine as a correlation payload; requires DotNetCliPackageType and DotNetCliVersion
+- DotNetCliPackageType: '' # optional -- either 'sdk' or 'runtime'; determines whether the sdk or runtime will be sent to Helix; see https://raw.githubusercontent.com/dotnet/core/master/release-notes/releases.json
+- DotNetCliVersion: '' # optional -- version of the CLI to send to Helix; based on this: https://raw.githubusercontent.com/dotnet/core/master/release-notes/releases.json
+- EnableXUnitReporter: false # optional -- true enables XUnit result reporting to Mission Control
+- WaitForWorkItemCompletion: true # optional -- true will make the task wait until work items have been completed and fail the build if work items fail. False is "fire and forget."
+- Creator: '' # optional -- if the build is external, use this to specify who is sending the job
+- DisplayNamePrefix: 'Send job to Helix' # optional -- rename the beginning of the displayName of the steps in AzDO
+- condition: succeeded() # optional -- condition for step to execute; defaults to succeeded()
+- continueOnError: false # optional -- determines whether to continue the build if the step errors; defaults to false
+- osGroup: '' # required -- operating system for the job
+-
+-
+-steps:
+-- template: /eng/pipelines/common/templates/runtimes/send-to-helix-inner-step.yml
+- parameters:
+- osGroup: ${{ parameters.osGroup }}
+- sendParams: $(Build.SourcesDirectory)/eng/common/performance/${{ parameters.ProjectFile }} /restore /t:Test /bl:$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)/SendToHelix.binlog
+- displayName: ${{ parameters.DisplayNamePrefix }}
+- condition: ${{ parameters.condition }}
+- continueOnError: ${{ parameters.continueOnError }}
+- environment:
+- BuildConfig: $(_BuildConfig)
+- HelixSource: ${{ parameters.HelixSource }}
+- HelixType: ${{ parameters.HelixType }}
+- HelixBuild: ${{ parameters.HelixBuild }}
+- HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
+- HelixAccessToken: ${{ parameters.HelixAccessToken }}
+- HelixPreCommands: ${{ parameters.HelixPreCommands }}
+- HelixPostCommands: ${{ parameters.HelixPostCommands }}
+- WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
+- CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
+- IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
+- DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
+- DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
+- EnableXUnitReporter: ${{ parameters.EnableXUnitReporter }}
+- WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
+- Creator: ${{ parameters.Creator }}
+- SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+diff --git a/eng/common/templates/steps/send-to-helix.yml b/eng/common/templates/steps/send-to-helix.yml
+index c6ddbd8..cd02ae1 100644
+--- a/eng/common/templates/steps/send-to-helix.yml
++++ b/eng/common/templates/steps/send-to-helix.yml
+@@ -18,8 +18,8 @@ parameters:
+ XUnitRuntimeTargetFramework: '' # optional -- framework to use for the xUnit console runner
+ XUnitRunnerVersion: '' # optional -- version of the xUnit nuget package you wish to use on Helix; required for XUnitProjects
+ IncludeDotNetCli: false # optional -- true will download a version of the .NET CLI onto the Helix machine as a correlation payload; requires DotNetCliPackageType and DotNetCliVersion
+- DotNetCliPackageType: '' # optional -- either 'sdk' or 'runtime'; determines whether the sdk or runtime will be sent to Helix; see https://raw.githubusercontent.com/dotnet/core/master/release-notes/releases-index.json
+- DotNetCliVersion: '' # optional -- version of the CLI to send to Helix; based on this: https://raw.githubusercontent.com/dotnet/core/master/release-notes/releases-index.json
++ DotNetCliPackageType: '' # optional -- either 'sdk', 'runtime', or 'aspnetcore-runtime'; determines which of these will be sent to Helix; see https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json
++ DotNetCliVersion: '' # optional -- version of the CLI to send to Helix; based on this: https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json
+ EnableXUnitReporter: false # optional -- true enables XUnit result reporting to Mission Control
+ WaitForWorkItemCompletion: true # optional -- true will make the task wait until work items have been completed and fail the build if work items fail. False is "fire and forget."
+ IsExternal: false # [DEPRECATED] -- doesn't do anything, jobs are external if HelixAccessToken is empty and Creator is set
+@@ -30,14 +30,40 @@ parameters:
+ continueOnError: false # optional -- determines whether to continue the build if the step errors; defaults to false
+
+ steps:
+-- template: /eng/pipelines/common/templates/runtimes/send-to-helix-inner-step.yml
+- parameters:
+- osGroup: ${{ variables['Agent.Os'] }}
+- sendParams: $(Build.SourcesDirectory)/eng/common/helixpublish.proj /restore /t:Test /bl:$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)/SendToHelix.binlog
++ - powershell: 'powershell "$env:BUILD_SOURCESDIRECTORY\eng\common\msbuild.ps1 $env:BUILD_SOURCESDIRECTORY\eng\common\helixpublish.proj /restore /t:Test /bl:$env:BUILD_SOURCESDIRECTORY\artifacts\log\$env:BuildConfig\SendToHelix.binlog"'
+ displayName: ${{ parameters.DisplayNamePrefix }} (Windows)
+- condition: ${{ parameters.condition }}
++ env:
++ BuildConfig: $(_BuildConfig)
++ HelixSource: ${{ parameters.HelixSource }}
++ HelixType: ${{ parameters.HelixType }}
++ HelixBuild: ${{ parameters.HelixBuild }}
++ HelixConfiguration: ${{ parameters.HelixConfiguration }}
++ HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
++ HelixAccessToken: ${{ parameters.HelixAccessToken }}
++ HelixPreCommands: ${{ parameters.HelixPreCommands }}
++ HelixPostCommands: ${{ parameters.HelixPostCommands }}
++ WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
++ WorkItemCommand: ${{ parameters.WorkItemCommand }}
++ WorkItemTimeout: ${{ parameters.WorkItemTimeout }}
++ CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
++ XUnitProjects: ${{ parameters.XUnitProjects }}
++ XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }}
++ XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }}
++ XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }}
++ XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }}
++ IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
++ DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
++ DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
++ EnableXUnitReporter: ${{ parameters.EnableXUnitReporter }}
++ WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
++ HelixBaseUri: ${{ parameters.HelixBaseUri }}
++ Creator: ${{ parameters.Creator }}
++ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
++ condition: and(${{ parameters.condition }}, eq(variables['Agent.Os'], 'Windows_NT'))
+ continueOnError: ${{ parameters.continueOnError }}
+- environment:
++ - script: $BUILD_SOURCESDIRECTORY/eng/common/msbuild.sh $BUILD_SOURCESDIRECTORY/eng/common/helixpublish.proj /restore /t:Test /bl:$BUILD_SOURCESDIRECTORY/artifacts/log/$BuildConfig/SendToHelix.binlog
++ displayName: ${{ parameters.DisplayNamePrefix }} (Unix)
++ env:
+ BuildConfig: $(_BuildConfig)
+ HelixSource: ${{ parameters.HelixSource }}
+ HelixType: ${{ parameters.HelixType }}
+@@ -64,3 +90,5 @@ steps:
+ HelixBaseUri: ${{ parameters.HelixBaseUri }}
+ Creator: ${{ parameters.Creator }}
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
++ condition: and(${{ parameters.condition }}, ne(variables['Agent.Os'], 'Windows_NT'))
++ continueOnError: ${{ parameters.continueOnError }}
+diff --git a/eng/common/templates/steps/source-build.yml b/eng/common/templates/steps/source-build.yml
+index 8e336b7..e20637e 100644
+--- a/eng/common/templates/steps/source-build.yml
++++ b/eng/common/templates/steps/source-build.yml
+@@ -34,9 +34,14 @@ steps:
+ targetRidArgs='/p:TargetRid=${{ parameters.platform.targetRID }}'
+ fi
+
++ publishArgs=
++ if [ '${{ parameters.platform.skipPublishValidation }}' != 'true' ]; then
++ publishArgs='--publish'
++ fi
++
+ ${{ coalesce(parameters.platform.buildScript, './build.sh') }} --ci \
+ --configuration $buildConfig \
+- --restore --build --pack --publish \
++ --restore --build --pack $publishArgs -bl \
+ $officialBuildArgs \
+ $targetRidArgs \
+ /p:SourceBuildNonPortable=${{ parameters.platform.nonPortable }} \
+diff --git a/eng/common/tools.ps1 b/eng/common/tools.ps1
+index 6006608..5619c7a 100644
+--- a/eng/common/tools.ps1
++++ b/eng/common/tools.ps1
+@@ -48,6 +48,9 @@
+ # True to use global NuGet cache instead of restoring packages to repository-local directory.
+ [bool]$useGlobalNuGetCache = if (Test-Path variable:useGlobalNuGetCache) { $useGlobalNuGetCache } else { !$ci }
+
++# True to exclude prerelease versions of Visual Studio during build
++[bool]$excludePrereleaseVS = if (Test-Path variable:excludePrereleaseVS) { $excludePrereleaseVS } else { $false }
++
+ # An array of names of processes to stop on script exit if prepareMachine is true.
+ $processesToStopOnExit = if (Test-Path variable:processesToStopOnExit) { $processesToStopOnExit } else { @('msbuild', 'dotnet', 'vbcscompiler') }
+
+@@ -57,15 +60,11 @@ set-strictmode -version 2.0
+ $ErrorActionPreference = 'Stop'
+ [Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12
+
+-# If specified, provides an alternate path for getting .NET Core SDKs and Runtimes. This script will still try public sources first.
++# If specified, provides an alternate path for getting .NET Core SDKs and Runtimes. This script will still try public sources first.
+ [string]$runtimeSourceFeed = if (Test-Path variable:runtimeSourceFeed) { $runtimeSourceFeed } else { $null }
+ # Base-64 encoded SAS token that has permission to storage container described by $runtimeSourceFeed
+ [string]$runtimeSourceFeedKey = if (Test-Path variable:runtimeSourceFeedKey) { $runtimeSourceFeedKey } else { $null }
+
+-# If false, use copy of dotnet-install from /eng/common/dotnet-install-scripts (for custom behaviors).
+-# otherwise will fetch from public location.
+-[bool]$useDefaultDotnetInstall = if (Test-Path variable:useDefaultDotnetInstall) { $useDefaultDotnetInstall } else { $false }
+-
+ function Create-Directory ([string[]] $path) {
+ New-Item -Path $path -Force -ItemType 'Directory' | Out-Null
+ }
+@@ -145,7 +144,7 @@ function InitializeDotNetCli([bool]$install, [bool]$createSdkLocationFile) {
+
+ # Use dotnet installation specified in DOTNET_INSTALL_DIR if it contains the required SDK version,
+ # otherwise install the dotnet CLI and SDK to repo local .dotnet directory to avoid potential permission issues.
+- if ((-not $globalJsonHasRuntimes) -and ($env:DOTNET_INSTALL_DIR -ne $null) -and (Test-Path(Join-Path $env:DOTNET_INSTALL_DIR "sdk\$dotnetSdkVersion"))) {
++ if ((-not $globalJsonHasRuntimes) -and (-not [string]::IsNullOrEmpty($env:DOTNET_INSTALL_DIR)) -and (Test-Path(Join-Path $env:DOTNET_INSTALL_DIR "sdk\$dotnetSdkVersion"))) {
+ $dotnetRoot = $env:DOTNET_INSTALL_DIR
+ } else {
+ $dotnetRoot = Join-Path $RepoRoot '.dotnet'
+@@ -173,7 +172,7 @@ function InitializeDotNetCli([bool]$install, [bool]$createSdkLocationFile) {
+ Set-Content -Path $sdkCacheFileTemp -Value $dotnetRoot
+
+ try {
+- Rename-Item -Force -Path $sdkCacheFileTemp 'sdk.txt'
++ Move-Item -Force $sdkCacheFileTemp (Join-Path $ToolsetDir 'sdk.txt')
+ } catch {
+ # Somebody beat us
+ Remove-Item -Path $sdkCacheFileTemp
+@@ -194,49 +193,44 @@ function InitializeDotNetCli([bool]$install, [bool]$createSdkLocationFile) {
+ return $global:_DotNetInstallDir = $dotnetRoot
+ }
+
+-function GetDotNetInstallScript([string] $dotnetRoot) {
+- $installScript = Join-Path $dotnetRoot 'dotnet-install.ps1'
+- if (!(Test-Path $installScript)) {
+- create-directory $dotnetroot
++function Retry($downloadBlock, $maxRetries = 5) {
++ $retries = 1
+
+- if ($useDefaultDotnetInstall)
+- {
+- $progresspreference = 'silentlycontinue' # don't display the console progress ui - it's a huge perf hit
++ while($true) {
++ try {
++ & $downloadBlock
++ break
++ }
++ catch {
++ Write-PipelineTelemetryError -Category 'InitializeToolset' -Message $_
++ }
+
+- $maxretries = 5
+- $retries = 1
++ if (++$retries -le $maxRetries) {
++ $delayInSeconds = [math]::Pow(2, $retries) - 1 # Exponential backoff
++ Write-Host "Retrying. Waiting for $delayInSeconds seconds before next attempt ($retries of $maxRetries)."
++ Start-Sleep -Seconds $delayInSeconds
++ }
++ else {
++ Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Unable to download file in $maxRetries attempts."
++ break
++ }
+
+- $uri = "https://dot.net/$dotnetinstallscriptversion/dotnet-install.ps1"
++ }
++}
+
+- while($true) {
+- try {
+- write-host "get $uri"
+- invoke-webrequest $uri -outfile $installscript
+- break
+- }
+- catch {
+- write-host "failed to download '$uri'"
+- write-error $_.exception.message -erroraction continue
+- }
++function GetDotNetInstallScript([string] $dotnetRoot) {
++ $installScript = Join-Path $dotnetRoot 'dotnet-install.ps1'
++ if (!(Test-Path $installScript)) {
++ Create-Directory $dotnetRoot
++ $ProgressPreference = 'SilentlyContinue' # Don't display the console progress UI - it's a huge perf hit
++ $uri = "https://dot.net/$dotnetInstallScriptVersion/dotnet-install.ps1"
+
+- if (++$retries -le $maxretries) {
+- $delayinseconds = [math]::pow(2, $retries) - 1 # exponential backoff
+- write-host "retrying. waiting for $delayinseconds seconds before next attempt ($retries of $maxretries)."
+- start-sleep -seconds $delayinseconds
+- }
+- else {
+- throw "unable to download file in $maxretries attempts."
+- }
+- }
+- }
+- else
+- {
+- # Use a special version of the script from eng/common that understands the existence of a "productVersion.txt" in a dotnet path.
+- # See https://github.com/dotnet/arcade/issues/6047 for details
+- $engCommonCopy = Resolve-Path (Join-Path $PSScriptRoot 'dotnet-install-scripts\dotnet-install.ps1')
+- Copy-Item $engCommonCopy -Destination $installScript -Force
+- }
++ Retry({
++ Write-Host "GET $uri"
++ Invoke-WebRequest $uri -OutFile $installScript
++ })
+ }
++
+ return $installScript
+ }
+
+@@ -318,8 +312,8 @@ function InitializeVisualStudioMSBuild([bool]$install, [object]$vsRequirements =
+
+ # If the version of msbuild is going to be xcopied,
+ # use this version. Version matches a package here:
+- # https://dev.azure.com/dnceng/public/_packaging?_a=package&feed=dotnet-eng&package=RoslynTools.MSBuild&protocolType=NuGet&version=16.8.0-preview3&view=overview
+- $defaultXCopyMSBuildVersion = '16.8.0-preview3'
++ # https://dev.azure.com/dnceng/public/_packaging?_a=package&feed=dotnet-eng&package=RoslynTools.MSBuild&protocolType=NuGet&version=16.10.0-preview2&view=overview
++ $defaultXCopyMSBuildVersion = '16.10.0-preview2'
+
+ if (!$vsRequirements) { $vsRequirements = $GlobalJson.tools.vs }
+ $vsMinVersionStr = if ($vsRequirements.version) { $vsRequirements.version } else { $vsMinVersionReqdStr }
+@@ -413,9 +407,13 @@ function InitializeXCopyMSBuild([string]$packageVersion, [bool]$install) {
+ }
+
+ Create-Directory $packageDir
++
+ Write-Host "Downloading $packageName $packageVersion"
+ $ProgressPreference = 'SilentlyContinue' # Don't display the console progress UI - it's a huge perf hit
+- Invoke-WebRequest "https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/flat2/$packageName/$packageVersion/$packageName.$packageVersion.nupkg" -OutFile $packagePath
++ Retry({
++ Invoke-WebRequest "https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/flat2/$packageName/$packageVersion/$packageName.$packageVersion.nupkg" -OutFile $packagePath
++ })
++
+ Unzip $packagePath $packageDir
+ }
+
+@@ -452,16 +450,17 @@ function LocateVisualStudio([object]$vsRequirements = $null){
+ if (!(Test-Path $vsWhereExe)) {
+ Create-Directory $vsWhereDir
+ Write-Host 'Downloading vswhere'
+- try {
++ Retry({
+ Invoke-WebRequest "https://netcorenativeassets.blob.core.windows.net/resource-packages/external/windows/vswhere/$vswhereVersion/vswhere.exe" -OutFile $vswhereExe
+- }
+- catch {
+- Write-PipelineTelemetryError -Category 'InitializeToolset' -Message $_
+- }
++ })
+ }
+
+ if (!$vsRequirements) { $vsRequirements = $GlobalJson.tools.vs }
+- $args = @('-latest', '-prerelease', '-format', 'json', '-requires', 'Microsoft.Component.MSBuild', '-products', '*')
++ $args = @('-latest', '-format', 'json', '-requires', 'Microsoft.Component.MSBuild', '-products', '*')
++
++ if (!$excludePrereleaseVS) {
++ $args += '-prerelease'
++ }
+
+ if (Get-Member -InputObject $vsRequirements -Name 'version') {
+ $args += '-version'
+@@ -487,7 +486,13 @@ function LocateVisualStudio([object]$vsRequirements = $null){
+
+ function InitializeBuildTool() {
+ if (Test-Path variable:global:_BuildTool) {
+- return $global:_BuildTool
++ # If the requested msbuild parameters do not match, clear the cached variables.
++ if($global:_BuildTool.Contains('ExcludePrereleaseVS') -and $global:_BuildTool.ExcludePrereleaseVS -ne $excludePrereleaseVS) {
++ Remove-Item variable:global:_BuildTool
++ Remove-Item variable:global:_MSBuildExe
++ } else {
++ return $global:_BuildTool
++ }
+ }
+
+ if (-not $msbuildEngine) {
+@@ -506,7 +511,7 @@ function InitializeBuildTool() {
+ ExitWithExitCode 1
+ }
+ $dotnetPath = Join-Path $dotnetRoot (GetExecutableFileName 'dotnet')
+- $buildTool = @{ Path = $dotnetPath; Command = 'msbuild'; Tool = 'dotnet'; Framework = 'netcoreapp2.1' }
++ $buildTool = @{ Path = $dotnetPath; Command = 'msbuild'; Tool = 'dotnet'; Framework = 'netcoreapp3.1' }
+ } elseif ($msbuildEngine -eq "vs") {
+ try {
+ $msbuildPath = InitializeVisualStudioMSBuild -install:$restore
+@@ -515,7 +520,7 @@ function InitializeBuildTool() {
+ ExitWithExitCode 1
+ }
+
+- $buildTool = @{ Path = $msbuildPath; Command = ""; Tool = "vs"; Framework = "net472" }
++ $buildTool = @{ Path = $msbuildPath; Command = ""; Tool = "vs"; Framework = "net472"; ExcludePrereleaseVS = $excludePrereleaseVS }
+ } else {
+ Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Unexpected value of -msbuildEngine: '$msbuildEngine'."
+ ExitWithExitCode 1
+@@ -540,13 +545,15 @@ function GetDefaultMSBuildEngine() {
+
+ function GetNuGetPackageCachePath() {
+ if ($env:NUGET_PACKAGES -eq $null) {
+- # Use local cache on CI to ensure deterministic build,
++ # Use local cache on CI to ensure deterministic build.
++ # Avoid using the http cache as a workaround for https://github.com/NuGet/Home/issues/3116
+ # use global cache in dev builds to avoid cost of downloading packages.
+ # For directory normalization, see also: https://github.com/NuGet/Home/issues/7968
+ if ($useGlobalNuGetCache) {
+ $env:NUGET_PACKAGES = Join-Path $env:UserProfile '.nuget\packages\'
+ } else {
+ $env:NUGET_PACKAGES = Join-Path $RepoRoot '.packages\'
++ $env:RESTORENOCACHE = $true
+ }
+ }
+
+@@ -640,9 +647,26 @@ function MSBuild() {
+ }
+
+ $toolsetBuildProject = InitializeToolset
+- $path = Split-Path -parent $toolsetBuildProject
+- $path = Join-Path $path (Join-Path $buildTool.Framework 'Microsoft.DotNet.Arcade.Sdk.dll')
+- $args += "/logger:$path"
++ $basePath = Split-Path -parent $toolsetBuildProject
++ $possiblePaths = @(
++ # new scripts need to work with old packages, so we need to look for the old names/versions
++ (Join-Path $basePath (Join-Path $buildTool.Framework 'Microsoft.DotNet.ArcadeLogging.dll')),
++ (Join-Path $basePath (Join-Path $buildTool.Framework 'Microsoft.DotNet.Arcade.Sdk.dll')),
++ (Join-Path $basePath (Join-Path netcoreapp2.1 'Microsoft.DotNet.ArcadeLogging.dll')),
++ (Join-Path $basePath (Join-Path netcoreapp2.1 'Microsoft.DotNet.Arcade.Sdk.dll'))
++ )
++ $selectedPath = $null
++ foreach ($path in $possiblePaths) {
++ if (Test-Path $path -PathType Leaf) {
++ $selectedPath = $path
++ break
++ }
++ }
++ if (-not $selectedPath) {
++ Write-PipelineTelemetryError -Category 'Build' -Message 'Unable to find arcade sdk logger assembly.'
++ ExitWithExitCode 1
++ }
++ $args += "/logger:$selectedPath"
+ }
+
+ MSBuild-Core @args
+@@ -678,7 +702,10 @@ function MSBuild-Core() {
+ }
+
+ foreach ($arg in $args) {
+- if ($arg -ne $null -and $arg.Trim() -ne "") {
++ if ($null -ne $arg -and $arg.Trim() -ne "") {
++ if ($arg.EndsWith('\')) {
++ $arg = $arg + "\"
++ }
+ $cmdArgs += " `"$arg`""
+ }
+ }
+@@ -688,14 +715,23 @@ function MSBuild-Core() {
+ $exitCode = Exec-Process $buildTool.Path $cmdArgs
+
+ if ($exitCode -ne 0) {
+- Write-PipelineTelemetryError -Category 'Build' -Message 'Build failed.'
++ # We should not Write-PipelineTaskError here because that message shows up in the build summary
++ # The build already logged an error; that's the reason it failed. Producing an error here only adds noise.
++ Write-Host "Build failed with exit code $exitCode. Check errors above." -ForegroundColor Red
+
+ $buildLog = GetMSBuildBinaryLogCommandLineArgument $args
+- if ($buildLog -ne $null) {
++ if ($null -ne $buildLog) {
+ Write-Host "See log: $buildLog" -ForegroundColor DarkGray
+ }
+
+- ExitWithExitCode $exitCode
++ if ($ci) {
++ Write-PipelineSetResult -Result "Failed" -Message "msbuild execution failed."
++ # Exiting with an exit code causes the azure pipelines task to log yet another "noise" error
++ # The above Write-PipelineSetResult will cause the task to be marked as failure without adding yet another error
++ ExitWithExitCode 0
++ } else {
++ ExitWithExitCode $exitCode
++ }
+ }
+ }
+
+@@ -741,7 +777,7 @@ function Get-Darc($version) {
+
+ . $PSScriptRoot\pipeline-logging-functions.ps1
+
+-$RepoRoot = Resolve-Path (Join-Path $PSScriptRoot '..\..')
++$RepoRoot = Resolve-Path (Join-Path $PSScriptRoot '..\..\')
+ $EngRoot = Resolve-Path (Join-Path $PSScriptRoot '..')
+ $ArtifactsDir = Join-Path $RepoRoot 'artifacts'
+ $ToolsetDir = Join-Path $ArtifactsDir 'toolset'
+diff --git a/eng/common/tools.sh b/eng/common/tools.sh
+old mode 100644
+new mode 100755
+index c722a05..05ca99c
+--- a/eng/common/tools.sh
++++ b/eng/common/tools.sh
+@@ -68,10 +68,6 @@ fi
+ runtime_source_feed=${runtime_source_feed:-''}
+ runtime_source_feed_key=${runtime_source_feed_key:-''}
+
+-# Determines if dotnet-install.sh comes from the eng/common folder or the internet
+-# (default = public version)
+-use_default_dotnet_install=${use_default_dotnet_install:-false}
+-
+ # Resolve any symlinks in the given path.
+ function ResolvePath {
+ local path=$1
+@@ -93,16 +89,16 @@ function ResolvePath {
+ function ReadGlobalVersion {
+ local key=$1
+
+- local line=$(awk "/$key/ {print; exit}" "$global_json_file")
+- local pattern="\"$key\" *: *\"(.*)\""
++ if command -v jq &> /dev/null; then
++ _ReadGlobalVersion="$(jq -r ".[] | select(has(\"$key\")) | .\"$key\"" "$global_json_file")"
++ elif [[ "$(cat "$global_json_file")" =~ \"$key\"[[:space:]\:]*\"([^\"]+) ]]; then
++ _ReadGlobalVersion=${BASH_REMATCH[1]}
++ fi
+
+- if [[ ! $line =~ $pattern ]]; then
++ if [[ -z "$_ReadGlobalVersion" ]]; then
+ Write-PipelineTelemetryError -category 'Build' "Error: Cannot find \"$key\" in $global_json_file"
+ ExitWithExitCode 1
+ fi
+-
+- # return value
+- _ReadGlobalVersion=${BASH_REMATCH[1]}
+ }
+
+ function InitializeDotNetCli {
+@@ -253,7 +249,7 @@ function with_retries {
+ return 0
+ fi
+
+- timeout=$((2**$retries-1))
++ timeout=$((3**$retries-1))
+ echo "Failed to execute '$@'. Waiting $timeout seconds before next attempt ($retries out of $maxRetries)." 1>&2
+ sleep $timeout
+ done
+@@ -271,30 +267,31 @@ function GetDotNetInstallScript {
+ if [[ ! -a "$install_script" ]]; then
+ mkdir -p "$root"
+
+- if [[ "$use_default_dotnet_install" == true ]]; then
+- echo "Downloading '$install_script_url'"
+-
+- # Use curl if available, otherwise use wget
+- if command -v curl > /dev/null; then
+- with_retries curl "$install_script_url" -sSL --retry 10 --create-dirs -o "$install_script" || {
+- local exit_code=$?
+- Write-PipelineTelemetryError -category 'InitializeToolset' "Failed to acquire dotnet install script (exit code '$exit_code')."
+- ExitWithExitCode $exit_code
+- }
+- else
+- with_retries wget -v -O "$install_script" "$install_script_url" || {
++ echo "Downloading '$install_script_url'"
++
++ # Use curl if available, otherwise use wget
++ if command -v curl > /dev/null; then
++ # first, try directly, if this fails we will retry with verbose logging
++ curl "$install_script_url" -sSL --retry 10 --create-dirs -o "$install_script" || {
++ if command -v openssl &> /dev/null; then
++ echo "Curl failed; dumping some information about dotnet.microsoft.com for later investigation"
++ echo | openssl s_client -showcerts -servername dotnet.microsoft.com -connect dotnet.microsoft.com:443
++ fi
++ echo "Will now retry the same URL with verbose logging."
++ with_retries curl "$install_script_url" -sSL --verbose --retry 10 --create-dirs -o "$install_script" || {
+ local exit_code=$?
+ Write-PipelineTelemetryError -category 'InitializeToolset' "Failed to acquire dotnet install script (exit code '$exit_code')."
+ ExitWithExitCode $exit_code
+ }
+- fi
++ }
+ else
+- # Use a special version of the script from eng/common that understands the existence of a "productVersion.txt" in a dotnet path.
+- # See https://github.com/dotnet/arcade/issues/6047 for details
+- cp $repo_root/eng/common/dotnet-install-scripts/dotnet-install.sh $install_script
++ with_retries wget -v -O "$install_script" "$install_script_url" || {
++ local exit_code=$?
++ Write-PipelineTelemetryError -category 'InitializeToolset' "Failed to acquire dotnet install script (exit code '$exit_code')."
++ ExitWithExitCode $exit_code
++ }
+ fi
+ fi
+-
+ # return value
+ _GetDotNetInstallScript="$install_script"
+ }
+@@ -309,15 +306,17 @@ function InitializeBuildTool {
+ # return values
+ _InitializeBuildTool="$_InitializeDotNetCli/dotnet"
+ _InitializeBuildToolCommand="msbuild"
+- _InitializeBuildToolFramework="netcoreapp2.1"
++ _InitializeBuildToolFramework="netcoreapp3.1"
+ }
+
++# Set RestoreNoCache as a workaround for https://github.com/NuGet/Home/issues/3116
+ function GetNuGetPackageCachePath {
+ if [[ -z ${NUGET_PACKAGES:-} ]]; then
+ if [[ "$use_global_nuget_cache" == true ]]; then
+ export NUGET_PACKAGES="$HOME/.nuget/packages"
+ else
+ export NUGET_PACKAGES="$repo_root/.packages"
++ export RESTORENOCACHE=true
+ fi
+ fi
+
+@@ -414,8 +413,24 @@ function MSBuild {
+ fi
+
+ local toolset_dir="${_InitializeToolset%/*}"
+- local logger_path="$toolset_dir/$_InitializeBuildToolFramework/Microsoft.DotNet.Arcade.Sdk.dll"
+- args=( "${args[@]}" "-logger:$logger_path" )
++ # new scripts need to work with old packages, so we need to look for the old names/versions
++ local selectedPath=
++ local possiblePaths=()
++ possiblePaths+=( "$toolset_dir/$_InitializeBuildToolFramework/Microsoft.DotNet.ArcadeLogging.dll" )
++ possiblePaths+=( "$toolset_dir/$_InitializeBuildToolFramework/Microsoft.DotNet.Arcade.Sdk.dll" )
++ possiblePaths+=( "$toolset_dir/netcoreapp2.1/Microsoft.DotNet.ArcadeLogging.dll" )
++ possiblePaths+=( "$toolset_dir/netcoreapp2.1/Microsoft.DotNet.Arcade.Sdk.dll" )
++ for path in "${possiblePaths[@]}"; do
++ if [[ -f $path ]]; then
++ selectedPath=$path
++ break
++ fi
++ done
++ if [[ -z "$selectedPath" ]]; then
++ Write-PipelineTelemetryError -category 'Build' "Unable to find arcade sdk logger assembly."
++ ExitWithExitCode 1
++ fi
++ args+=( "-logger:$selectedPath" )
+ fi
+
+ MSBuild-Core ${args[@]}
+@@ -446,8 +461,17 @@ function MSBuild-Core {
+
+ "$_InitializeBuildTool" "$@" || {
+ local exit_code=$?
+- Write-PipelineTaskError "Build failed (exit code '$exit_code')."
+- ExitWithExitCode $exit_code
++ # We should not Write-PipelineTaskError here because that message shows up in the build summary
++ # The build already logged an error; that's the reason it failed. Producing an error here only adds noise.
++ echo "Build failed with exit code $exit_code. Check errors above."
++ if [[ "$ci" == "true" ]]; then
++ Write-PipelineSetResult -result "Failed" -message "msbuild execution failed."
++ # Exiting with an exit code causes the azure pipelines task to log yet another "noise" error
++ # The above Write-PipelineSetResult will cause the task to be marked as failure without adding yet another error
++ ExitWithExitCode 0
++ else
++ ExitWithExitCode $exit_code
++ fi
+ }
+ }
+
+@@ -461,23 +485,27 @@ _script_dir=`dirname "$_ResolvePath"`
+
+ eng_root=`cd -P "$_script_dir/.." && pwd`
+ repo_root=`cd -P "$_script_dir/../.." && pwd`
+-artifacts_dir="$repo_root/artifacts"
++repo_root="${repo_root}/"
++artifacts_dir="${repo_root}artifacts"
+ toolset_dir="$artifacts_dir/toolset"
+-tools_dir="$repo_root/.tools"
++tools_dir="${repo_root}.tools"
+ log_dir="$artifacts_dir/log/$configuration"
+ temp_dir="$artifacts_dir/tmp/$configuration"
+
+-global_json_file="$repo_root/global.json"
++global_json_file="${repo_root}global.json"
+ # determine if global.json contains a "runtimes" entry
+ global_json_has_runtimes=false
+-dotnetlocal_key=$(awk "/runtimes/ {print; exit}" "$global_json_file") || true
+-if [[ -n "$dotnetlocal_key" ]]; then
++if command -v jq &> /dev/null; then
++ if jq -er '. | select(has("runtimes"))' "$global_json_file" &> /dev/null; then
++ global_json_has_runtimes=true
++ fi
++elif [[ "$(cat "$global_json_file")" =~ \"runtimes\"[[:space:]\:]*\{ ]]; then
+ global_json_has_runtimes=true
+ fi
+
+ # HOME may not be defined in some scenarios, but it is required by NuGet
+ if [[ -z $HOME ]]; then
+- export HOME="$repo_root/artifacts/.home/"
++ export HOME="${repo_root}artifacts/.home/"
+ mkdir -p "$HOME"
+ fi
+
+--
+2.31.1
+
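The arcade patch above reworks download retries on both sides of eng/common: tools.ps1 gains a Retry helper, and with_retries in tools.sh now backs off with powers of 3 instead of 2. A minimal standalone bash sketch of that retry-with-exponential-backoff pattern follows; the retry function name is illustrative, and the example URL is only a stand-in (the real scripts compose it from $dotnetInstallScriptVersion).

    #!/usr/bin/env bash
    # Sketch only: retry a command with exponential backoff, mirroring the
    # shape of with_retries in eng/common/tools.sh after this patch (base 3).
    retry() {
      local max_retries=5
      local retries=1
      while true; do
        if "$@"; then
          return 0
        fi
        if (( retries >= max_retries )); then
          echo "Failed to execute '$*' after $max_retries attempts." 1>&2
          return 1
        fi
        local timeout=$(( 3**retries - 1 ))
        echo "Retrying '$*' in $timeout seconds (attempt $retries of $max_retries)." 1>&2
        sleep "$timeout"
        (( retries++ ))
      done
    }

    # Example usage (illustrative URL for the public dotnet-install script).
    retry curl -sSL --create-dirs -o /tmp/dotnet-install.sh https://dot.net/v1/dotnet-install.sh
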
diff --git a/src/SourceBuild/tarball/content/patches/sourcelink/0001-Update-TFMs-to-net6.0.patch b/src/SourceBuild/tarball/content/patches/sourcelink/0001-Update-TFMs-to-net6.0.patch
new file mode 100644
index 000000000..58e98997c
--- /dev/null
+++ b/src/SourceBuild/tarball/content/patches/sourcelink/0001-Update-TFMs-to-net6.0.patch
@@ -0,0 +1,156 @@
+From aba2e21af6d299f0e5c04c7848e4971e0da0e5b5 Mon Sep 17 00:00:00 2001
+From: dseefeld
+Date: Tue, 20 Jul 2021 21:01:43 +0000
+Subject: [PATCH] Update TFMs to net6.0
+
+When building from source, only the current TFM is built. Update
+TargetFrameworks to only have net6.0.
+---
+ eng/Versions.props | 1 +
+ src/Microsoft.Build.Tasks.Git/Microsoft.Build.Tasks.Git.csproj | 1 +
+ .../Microsoft.SourceLink.AzureDevOpsServer.Git.csproj | 1 +
+ .../Microsoft.SourceLink.AzureRepos.Git.csproj | 1 +
+ .../Microsoft.SourceLink.Bitbucket.Git.csproj | 1 +
+ src/SourceLink.Common/Microsoft.SourceLink.Common.csproj | 1 +
+ src/SourceLink.GitHub/Microsoft.SourceLink.GitHub.csproj | 1 +
+ src/SourceLink.GitLab/Microsoft.SourceLink.GitLab.csproj | 1 +
+ src/SourceLink.GitWeb/Microsoft.SourceLink.GitWeb.csproj | 1 +
+ src/SourceLink.Gitea/Microsoft.SourceLink.Gitea.csproj | 1 +
+ src/SourceLink.Tools/Microsoft.SourceLink.Tools.Package.csproj | 1 +
+ 11 files changed, 11 insertions(+)
+
+diff --git a/eng/Versions.props b/eng/Versions.props
+index 4ea51a8..eff58cf 100644
+--- a/eng/Versions.props
++++ b/eng/Versions.props
+@@ -7,6 +7,7 @@
+ true
+
+ true
++ false
+ true
+ 2.4.1
+ 16.7.0
+diff --git a/src/Microsoft.Build.Tasks.Git/Microsoft.Build.Tasks.Git.csproj b/src/Microsoft.Build.Tasks.Git/Microsoft.Build.Tasks.Git.csproj
+index 71b45db..9f25e24 100644
+--- a/src/Microsoft.Build.Tasks.Git/Microsoft.Build.Tasks.Git.csproj
++++ b/src/Microsoft.Build.Tasks.Git/Microsoft.Build.Tasks.Git.csproj
+@@ -1,6 +1,7 @@
+
+
+ net472;net5.0
++ net6.0
+ true
+
+
+diff --git a/src/SourceLink.AzureDevOpsServer.Git/Microsoft.SourceLink.AzureDevOpsServer.Git.csproj b/src/SourceLink.AzureDevOpsServer.Git/Microsoft.SourceLink.AzureDevOpsServer.Git.csproj
+index b905b8e..43cd185 100644
+--- a/src/SourceLink.AzureDevOpsServer.Git/Microsoft.SourceLink.AzureDevOpsServer.Git.csproj
++++ b/src/SourceLink.AzureDevOpsServer.Git/Microsoft.SourceLink.AzureDevOpsServer.Git.csproj
+@@ -1,6 +1,7 @@
+
+
+ net472;net5.0
++ net6.0
+ true
+
+
+diff --git a/src/SourceLink.AzureRepos.Git/Microsoft.SourceLink.AzureRepos.Git.csproj b/src/SourceLink.AzureRepos.Git/Microsoft.SourceLink.AzureRepos.Git.csproj
+index 8a5a0bf..5400288 100644
+--- a/src/SourceLink.AzureRepos.Git/Microsoft.SourceLink.AzureRepos.Git.csproj
++++ b/src/SourceLink.AzureRepos.Git/Microsoft.SourceLink.AzureRepos.Git.csproj
+@@ -1,6 +1,7 @@
+
+
+ net472;net5.0
++ net6.0
+ true
+
+
+diff --git a/src/SourceLink.Bitbucket.Git/Microsoft.SourceLink.Bitbucket.Git.csproj b/src/SourceLink.Bitbucket.Git/Microsoft.SourceLink.Bitbucket.Git.csproj
+index 387b65c..db2ce6c 100644
+--- a/src/SourceLink.Bitbucket.Git/Microsoft.SourceLink.Bitbucket.Git.csproj
++++ b/src/SourceLink.Bitbucket.Git/Microsoft.SourceLink.Bitbucket.Git.csproj
+@@ -1,6 +1,7 @@
+
+
+ net472;net5.0
++ net6.0
+ true
+
+
+diff --git a/src/SourceLink.Common/Microsoft.SourceLink.Common.csproj b/src/SourceLink.Common/Microsoft.SourceLink.Common.csproj
+index b220563..5cf5ab4 100644
+--- a/src/SourceLink.Common/Microsoft.SourceLink.Common.csproj
++++ b/src/SourceLink.Common/Microsoft.SourceLink.Common.csproj
+@@ -1,6 +1,7 @@
+
+
+ net472;net5.0
++ net6.0
+ true
+
+
+diff --git a/src/SourceLink.GitHub/Microsoft.SourceLink.GitHub.csproj b/src/SourceLink.GitHub/Microsoft.SourceLink.GitHub.csproj
+index 45985c8..1b9edbc 100644
+--- a/src/SourceLink.GitHub/Microsoft.SourceLink.GitHub.csproj
++++ b/src/SourceLink.GitHub/Microsoft.SourceLink.GitHub.csproj
+@@ -1,6 +1,7 @@
+
+
+ net472;net5.0
++ net6.0
+ true
+
+
+diff --git a/src/SourceLink.GitLab/Microsoft.SourceLink.GitLab.csproj b/src/SourceLink.GitLab/Microsoft.SourceLink.GitLab.csproj
+index 18300ce..9ca4657 100644
+--- a/src/SourceLink.GitLab/Microsoft.SourceLink.GitLab.csproj
++++ b/src/SourceLink.GitLab/Microsoft.SourceLink.GitLab.csproj
+@@ -1,6 +1,7 @@
+
+
+ net472;net5.0
++ net6.0
+ true
+
+
+diff --git a/src/SourceLink.GitWeb/Microsoft.SourceLink.GitWeb.csproj b/src/SourceLink.GitWeb/Microsoft.SourceLink.GitWeb.csproj
+index 1991db7..ae16d1f 100644
+--- a/src/SourceLink.GitWeb/Microsoft.SourceLink.GitWeb.csproj
++++ b/src/SourceLink.GitWeb/Microsoft.SourceLink.GitWeb.csproj
+@@ -1,6 +1,7 @@
+
+
+ net472;net5.0
++ net6.0
+ true
+
+
+diff --git a/src/SourceLink.Gitea/Microsoft.SourceLink.Gitea.csproj b/src/SourceLink.Gitea/Microsoft.SourceLink.Gitea.csproj
+index b5bbc93..164e243 100644
+--- a/src/SourceLink.Gitea/Microsoft.SourceLink.Gitea.csproj
++++ b/src/SourceLink.Gitea/Microsoft.SourceLink.Gitea.csproj
+@@ -1,6 +1,7 @@
+
+
+ net472;net5.0
++ net6.0
+ true
+
+
+diff --git a/src/SourceLink.Tools/Microsoft.SourceLink.Tools.Package.csproj b/src/SourceLink.Tools/Microsoft.SourceLink.Tools.Package.csproj
+index 053f124..f27f774 100644
+--- a/src/SourceLink.Tools/Microsoft.SourceLink.Tools.Package.csproj
++++ b/src/SourceLink.Tools/Microsoft.SourceLink.Tools.Package.csproj
+@@ -3,6 +3,7 @@
+
+
+ net5.0;net472
++ net6.0
+ false
+ none
+ false
+--
+2.31.1
+
diff --git a/src/SourceBuild/tarball/content/patches/sourcelink/0002-Update-Task-paths-to-include-net6.0.patch b/src/SourceBuild/tarball/content/patches/sourcelink/0002-Update-Task-paths-to-include-net6.0.patch
new file mode 100644
index 000000000..927840635
--- /dev/null
+++ b/src/SourceBuild/tarball/content/patches/sourcelink/0002-Update-Task-paths-to-include-net6.0.patch
@@ -0,0 +1,142 @@
+From e41e61e4c91785ba5ff1761c7d14d9f92a612ae4 Mon Sep 17 00:00:00 2001
+From: dseefeld
+Date: Thu, 22 Jul 2021 15:00:30 +0000
+Subject: [PATCH] Update Task paths to include net6.0
+
+When building from source, only the current TFM is built. Update
+tool paths to look in net6.0 path when building from source.
+---
+ .../build/Microsoft.Build.Tasks.Git.props | 1 +
+ .../build/Microsoft.SourceLink.AzureDevOpsServer.Git.targets | 1 +
+ .../build/Microsoft.SourceLink.AzureRepos.Git.targets | 1 +
+ .../build/Microsoft.SourceLink.AzureRepos.Tfvc.targets | 1 +
+ .../build/Microsoft.SourceLink.Bitbucket.Git.targets | 1 +
+ src/SourceLink.Common/build/Microsoft.SourceLink.Common.props | 1 +
+ src/SourceLink.GitHub/build/Microsoft.SourceLink.GitHub.targets | 1 +
+ src/SourceLink.GitLab/build/Microsoft.SourceLink.GitLab.targets | 1 +
+ src/SourceLink.GitWeb/build/Microsoft.SourceLink.GitWeb.targets | 1 +
+ src/SourceLink.Gitea/build/Microsoft.SourceLink.Gitea.targets | 1 +
+ 10 files changed, 10 insertions(+)
+
+diff --git a/src/Microsoft.Build.Tasks.Git/build/Microsoft.Build.Tasks.Git.props b/src/Microsoft.Build.Tasks.Git/build/Microsoft.Build.Tasks.Git.props
+index 337242e..25f1d1d 100644
+--- a/src/Microsoft.Build.Tasks.Git/build/Microsoft.Build.Tasks.Git.props
++++ b/src/Microsoft.Build.Tasks.Git/build/Microsoft.Build.Tasks.Git.props
+@@ -3,5 +3,6 @@
+
+ $(MSBuildThisFileDirectory)..\tools\net472\Microsoft.Build.Tasks.Git.dll
+ $(MSBuildThisFileDirectory)..\tools\net5.0\Microsoft.Build.Tasks.Git.dll
++ $(MSBuildThisFileDirectory)..\tools\net6.0\Microsoft.Build.Tasks.Git.dll
+
+
+diff --git a/src/SourceLink.AzureDevOpsServer.Git/build/Microsoft.SourceLink.AzureDevOpsServer.Git.targets b/src/SourceLink.AzureDevOpsServer.Git/build/Microsoft.SourceLink.AzureDevOpsServer.Git.targets
+index 1c2b313..fcb1ed8 100644
+--- a/src/SourceLink.AzureDevOpsServer.Git/build/Microsoft.SourceLink.AzureDevOpsServer.Git.targets
++++ b/src/SourceLink.AzureDevOpsServer.Git/build/Microsoft.SourceLink.AzureDevOpsServer.Git.targets
+@@ -3,6 +3,7 @@
+
+ <_SourceLinkAzureDevOpsServerGitAssemblyFile Condition="'$(MSBuildRuntimeType)' != 'Core'">$(MSBuildThisFileDirectory)..\tools\net472\Microsoft.SourceLink.AzureDevOpsServer.Git.dll
+ <_SourceLinkAzureDevOpsServerGitAssemblyFile Condition="'$(MSBuildRuntimeType)' == 'Core'">$(MSBuildThisFileDirectory)..\tools\net5.0\Microsoft.SourceLink.AzureDevOpsServer.Git.dll
++ <_SourceLinkAzureDevOpsServerGitAssemblyFile Condition="'$(DotNetBuildFromSource)' == 'true'">$(MSBuildThisFileDirectory)..\tools\net6.0\Microsoft.SourceLink.AzureDevOpsServer.Git.dll
+
+
+
+diff --git a/src/SourceLink.AzureRepos.Git/build/Microsoft.SourceLink.AzureRepos.Git.targets b/src/SourceLink.AzureRepos.Git/build/Microsoft.SourceLink.AzureRepos.Git.targets
+index d49e459..74a504c 100644
+--- a/src/SourceLink.AzureRepos.Git/build/Microsoft.SourceLink.AzureRepos.Git.targets
++++ b/src/SourceLink.AzureRepos.Git/build/Microsoft.SourceLink.AzureRepos.Git.targets
+@@ -3,6 +3,7 @@
+
+ <_SourceLinkAzureReposGitAssemblyFile Condition="'$(MSBuildRuntimeType)' != 'Core'">$(MSBuildThisFileDirectory)..\tools\net472\Microsoft.SourceLink.AzureRepos.Git.dll
+ <_SourceLinkAzureReposGitAssemblyFile Condition="'$(MSBuildRuntimeType)' == 'Core'">$(MSBuildThisFileDirectory)..\tools\net5.0\Microsoft.SourceLink.AzureRepos.Git.dll
++ <_SourceLinkAzureReposGitAssemblyFile Condition="'$(DotNetBuildFromSource)' == 'true'">$(MSBuildThisFileDirectory)..\tools\net6.0\Microsoft.SourceLink.AzureRepos.Git.dll
+
+
+
+diff --git a/src/SourceLink.AzureRepos.Tfvc/build/Microsoft.SourceLink.AzureRepos.Tfvc.targets b/src/SourceLink.AzureRepos.Tfvc/build/Microsoft.SourceLink.AzureRepos.Tfvc.targets
+index f4045fb..2f3fc2d 100644
+--- a/src/SourceLink.AzureRepos.Tfvc/build/Microsoft.SourceLink.AzureRepos.Tfvc.targets
++++ b/src/SourceLink.AzureRepos.Tfvc/build/Microsoft.SourceLink.AzureRepos.Tfvc.targets
+@@ -3,6 +3,7 @@
+
+ <_SourceLinkAzureReposTfvcAssemblyFile Condition="'$(MSBuildRuntimeType)' != 'Core'">$(MSBuildThisFileDirectory)..\tools\net472\Microsoft.SourceLink.AzureRepos.Tfvc.dll
+ <_SourceLinkAzureReposTfvcAssemblyFile Condition="'$(MSBuildRuntimeType)' == 'Core'">$(MSBuildThisFileDirectory)..\tools\net5.0\Microsoft.SourceLink.AzureRepos.Tfvc.dll
++ <_SourceLinkAzureReposTfvcAssemblyFile Condition="'$(DotNetBuildFromSource)' == 'true'">$(MSBuildThisFileDirectory)..\tools\net6.0\Microsoft.SourceLink.AzureRepos.Tfvc.dll
+
+
+
+diff --git a/src/SourceLink.Bitbucket.Git/build/Microsoft.SourceLink.Bitbucket.Git.targets b/src/SourceLink.Bitbucket.Git/build/Microsoft.SourceLink.Bitbucket.Git.targets
+index 0cb1491..76eb4b5 100644
+--- a/src/SourceLink.Bitbucket.Git/build/Microsoft.SourceLink.Bitbucket.Git.targets
++++ b/src/SourceLink.Bitbucket.Git/build/Microsoft.SourceLink.Bitbucket.Git.targets
+@@ -3,6 +3,7 @@
+
+ <_SourceLinkBitbucketAssemblyFile Condition="'$(MSBuildRuntimeType)' != 'Core'">$(MSBuildThisFileDirectory)..\tools\net472\Microsoft.SourceLink.Bitbucket.Git.dll
+ <_SourceLinkBitbucketAssemblyFile Condition="'$(MSBuildRuntimeType)' == 'Core'">$(MSBuildThisFileDirectory)..\tools\net5.0\Microsoft.SourceLink.Bitbucket.Git.dll
++ <_SourceLinkBitbucketAssemblyFile Condition="'$(DotNetBuildFromSource)' == 'true'">$(MSBuildThisFileDirectory)..\tools\net6.0\Microsoft.SourceLink.Bitbucket.Git.dll
+
+
+
+diff --git a/src/SourceLink.Common/build/Microsoft.SourceLink.Common.props b/src/SourceLink.Common/build/Microsoft.SourceLink.Common.props
+index ac63a8a..ab657ff 100644
+--- a/src/SourceLink.Common/build/Microsoft.SourceLink.Common.props
++++ b/src/SourceLink.Common/build/Microsoft.SourceLink.Common.props
+@@ -3,6 +3,7 @@
+
+ <_MicrosoftSourceLinkCommonAssemblyFile Condition="'$(MSBuildRuntimeType)' != 'Core'">$(MSBuildThisFileDirectory)..\tools\net472\Microsoft.SourceLink.Common.dll
+ <_MicrosoftSourceLinkCommonAssemblyFile Condition="'$(MSBuildRuntimeType)' == 'Core'">$(MSBuildThisFileDirectory)..\tools\net5.0\Microsoft.SourceLink.Common.dll
++ <_MicrosoftSourceLinkCommonAssemblyFile Condition="'$(DotNetBuildFromSource)' == 'true'">$(MSBuildThisFileDirectory)..\tools\net6.0\Microsoft.SourceLink.Common.dll
+
+
+
+diff --git a/src/SourceLink.GitHub/build/Microsoft.SourceLink.GitHub.targets b/src/SourceLink.GitHub/build/Microsoft.SourceLink.GitHub.targets
+index 2641835..52df43f 100644
+--- a/src/SourceLink.GitHub/build/Microsoft.SourceLink.GitHub.targets
++++ b/src/SourceLink.GitHub/build/Microsoft.SourceLink.GitHub.targets
+@@ -3,6 +3,7 @@
+
+ <_SourceLinkGitHubAssemblyFile Condition="'$(MSBuildRuntimeType)' != 'Core'">$(MSBuildThisFileDirectory)..\tools\net472\Microsoft.SourceLink.GitHub.dll
+ <_SourceLinkGitHubAssemblyFile Condition="'$(MSBuildRuntimeType)' == 'Core'">$(MSBuildThisFileDirectory)..\tools\net5.0\Microsoft.SourceLink.GitHub.dll
++ <_SourceLinkGitHubAssemblyFile Condition="'$(DotNetBuildFromSource)' == 'true'">$(MSBuildThisFileDirectory)..\tools\net6.0\Microsoft.SourceLink.GitHub.dll
+
+
+
+diff --git a/src/SourceLink.GitLab/build/Microsoft.SourceLink.GitLab.targets b/src/SourceLink.GitLab/build/Microsoft.SourceLink.GitLab.targets
+index ffc5140..72dae7e 100644
+--- a/src/SourceLink.GitLab/build/Microsoft.SourceLink.GitLab.targets
++++ b/src/SourceLink.GitLab/build/Microsoft.SourceLink.GitLab.targets
+@@ -3,6 +3,7 @@
+
+ <_SourceLinkGitLabAssemblyFile Condition="'$(MSBuildRuntimeType)' != 'Core'">$(MSBuildThisFileDirectory)..\tools\net472\Microsoft.SourceLink.GitLab.dll
+ <_SourceLinkGitLabAssemblyFile Condition="'$(MSBuildRuntimeType)' == 'Core'">$(MSBuildThisFileDirectory)..\tools\net5.0\Microsoft.SourceLink.GitLab.dll
++ <_SourceLinkGitLabAssemblyFile Condition="'$(DotNetBuildFromSource)' == 'true'">$(MSBuildThisFileDirectory)..\tools\net6.0\Microsoft.SourceLink.GitLab.dll
+
+
+
+diff --git a/src/SourceLink.GitWeb/build/Microsoft.SourceLink.GitWeb.targets b/src/SourceLink.GitWeb/build/Microsoft.SourceLink.GitWeb.targets
+index 9f21d62..8f5c453 100644
+--- a/src/SourceLink.GitWeb/build/Microsoft.SourceLink.GitWeb.targets
++++ b/src/SourceLink.GitWeb/build/Microsoft.SourceLink.GitWeb.targets
+@@ -3,6 +3,7 @@
+
+ <_SourceLinkGitWebAssemblyFile Condition="'$(MSBuildRuntimeType)' != 'Core'">$(MSBuildThisFileDirectory)..\tools\net472\Microsoft.SourceLink.GitWeb.dll
+ <_SourceLinkGitWebAssemblyFile Condition="'$(MSBuildRuntimeType)' == 'Core'">$(MSBuildThisFileDirectory)..\tools\net5.0\Microsoft.SourceLink.GitWeb.dll
++ <_SourceLinkGitWebAssemblyFile Condition="'$(DotNetBuildFromSource)' == 'true'">$(MSBuildThisFileDirectory)..\tools\net6.0\Microsoft.SourceLink.GitWeb.dll
+
+
+
+diff --git a/src/SourceLink.Gitea/build/Microsoft.SourceLink.Gitea.targets b/src/SourceLink.Gitea/build/Microsoft.SourceLink.Gitea.targets
+index 5b6765e..8cccda0 100644
+--- a/src/SourceLink.Gitea/build/Microsoft.SourceLink.Gitea.targets
++++ b/src/SourceLink.Gitea/build/Microsoft.SourceLink.Gitea.targets
+@@ -3,6 +3,7 @@
+
+ <_SourceLinkGiteaAssemblyFile Condition="'$(MSBuildRuntimeType)' != 'Core'">$(MSBuildThisFileDirectory)..\tools\net472\Microsoft.SourceLink.Gitea.dll
+ <_SourceLinkGiteaAssemblyFile Condition="'$(MSBuildRuntimeType)' == 'Core'">$(MSBuildThisFileDirectory)..\tools\net5.0\Microsoft.SourceLink.Gitea.dll
++ <_SourceLinkGiteaAssemblyFile Condition="'$(DotNetBuildFromSource)' == 'true'">$(MSBuildThisFileDirectory)..\tools\net6.0\Microsoft.SourceLink.Gitea.dll
+
+
+
+--
+2.31.1
+
diff --git a/src/SourceBuild/tarball/content/patches/symreader/0001-Exlude-test-project-from-source-build.patch b/src/SourceBuild/tarball/content/patches/symreader/0001-Exlude-test-project-from-source-build.patch
new file mode 100644
index 000000000..1c6f1f3bc
--- /dev/null
+++ b/src/SourceBuild/tarball/content/patches/symreader/0001-Exlude-test-project-from-source-build.patch
@@ -0,0 +1,31 @@
+From f4978575da2a47049a941260629f0f2e54979778 Mon Sep 17 00:00:00 2001
+From: dseefeld
+Date: Thu, 22 Jul 2021 13:33:45 +0000
+Subject: [PATCH] Exlude test project from source-build
+
+---
+ src/TestUtilities/TestUtilities.csproj | 3 ++-
+ 1 file changed, 2 insertions(+), 1 deletion(-)
+
+diff --git a/src/TestUtilities/TestUtilities.csproj b/src/TestUtilities/TestUtilities.csproj
+index c83b5df..92279cc 100644
+--- a/src/TestUtilities/TestUtilities.csproj
++++ b/src/TestUtilities/TestUtilities.csproj
+@@ -3,6 +3,7 @@
+ netstandard1.3
+ false
+ true
++ true
+
+
+
+@@ -13,4 +14,4 @@
+
+
+
+-
+\ No newline at end of file
++
+--
+2.31.1
+
diff --git a/src/SourceBuild/tarball/content/repos/Directory.Build.props b/src/SourceBuild/tarball/content/repos/Directory.Build.props
index ccbb7e16b..9d065c35c 100644
--- a/src/SourceBuild/tarball/content/repos/Directory.Build.props
+++ b/src/SourceBuild/tarball/content/repos/Directory.Build.props
@@ -61,7 +61,7 @@
-
+
+
+ ]]>
+
+
+ $(NoWarn);NU5104
+
+]]>
+
+
+
+
+
- logger_path="%24toolset_dir"/%24%28cd "$toolset_dir" && find . -name Microsoft.DotNet.Arcade.Sdk.dll \( -regex '.*netcoreapp2.1.*' -or -regex '.*net5.0.*' \) )
+ logger_path="%24toolset_dir"/%24%28cd "$toolset_dir" && find . -name Microsoft.DotNet.Arcade.Sdk.dll \( -regex '.*netcoreapp2.1.*' -or -regex '.*net6.0.*' \) )
- logger_path="%24toolset_dir"/%24%28cd "$toolset_dir" && find . -name Microsoft.DotNet.ArcadeLogging.dll \( -regex '.*netcoreapp2.1.*' -or -regex '.*net5.0.*' \) )
+ logger_path="%24toolset_dir"/%24%28cd "$toolset_dir" && find . -name Microsoft.DotNet.ArcadeLogging.dll \( -regex '.*netcoreapp2.1.*' -or -regex '.*net6.0.*' \) )
@@ -469,7 +496,7 @@
<_BuiltIntermediatePackages Condition="'$(PackagesOutput)' != ''" Include="$(PackagesOutput)/Microsoft.SourceBuild.Intermediate.*.nupkg" Exclude="$(PackagesOutput)/*.symbols.nupkg"/>
<_BuiltIntermediatePackages Condition="'@(PackagesOutputList)' != ''" Include="%(PackagesOutputList.Identity)/Microsoft.SourceBuild.Intermediate.*.nupkg" Exclude="%(PackagesOutputList.Identity)/*.symbols.nupkg"/>
-
+
<_DestinationPath>$(SourceBuiltPackagesPath)
@@ -503,7 +530,7 @@
-
@@ -592,14 +619,14 @@
-
-
-
@@ -613,7 +640,7 @@
Condition=" '$(ArcadeSDKToolPackagePath))' != '' "
SourceFiles="@(OverrideArcadeFiles)"
DestinationFiles="$(ArcadeSDKToolPackagePath)tools/SourceBuild/%(RecursiveDir)%(Filename)%(Extension)" />
-
+
@@ -729,6 +756,25 @@
DataFile="$(PackageReportDataFile)"
ProjectAssetsJsonArchiveFile="$(ProjectAssetsJsonArchiveFile)" />
+
+
+
+
+
+
+
+
+
+
+
+
+ $([System.String]::Copy(%(Filename)).ToLower())
+
+
+
+
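In the repos/Directory.Build.props hunk above, the logger lookup is embedded in an MSBuild property, so $ and ( appear escaped as %24 and %28. Decoded into plain bash, and with toolset_dir standing in for the resolved Arcade toolset directory (an assumption for illustration), the probe that now accepts net6.0 alongside netcoreapp2.1 is roughly:

    # Sketch only: decoded form of the escaped logger_path assignment above.
    # toolset_dir is hypothetical here; the build derives it from the restored Arcade SDK.
    toolset_dir="artifacts/toolset"
    logger_path="$toolset_dir"/$(cd "$toolset_dir" && \
      find . -name Microsoft.DotNet.ArcadeLogging.dll \( -regex '.*netcoreapp2.1.*' -or -regex '.*net6.0.*' \))
    echo "-logger:$logger_path"

The parallel change in tools.sh (the possiblePaths loop earlier in this diff) does the same probing without find, checking each candidate path in order and preferring Microsoft.DotNet.ArcadeLogging.dll over Microsoft.DotNet.Arcade.Sdk.dll.
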
diff --git a/src/SourceBuild/tarball/content/repos/arcade.proj b/src/SourceBuild/tarball/content/repos/arcade.proj
index 48360daf7..7c743ded7 100644
--- a/src/SourceBuild/tarball/content/repos/arcade.proj
+++ b/src/SourceBuild/tarball/content/repos/arcade.proj
@@ -2,7 +2,9 @@
- $(StandardSourceBuildCommand) $(StandardSourceBuildArgs)
+ $(StandardSourceBuildArgs)
+ $(BuildCommandArgs) $(FlagParameterPrefix)warnAsError $(ArcadeFalseBoolBuildArg)
+ $(StandardSourceBuildCommand) $(BuildCommandArgs)
$(ProjectDirectory)NuGet.config
diff --git a/src/SourceBuild/tarball/content/repos/command-line-api.proj b/src/SourceBuild/tarball/content/repos/command-line-api.proj
index ec303ecca..2a1fb7d09 100644
--- a/src/SourceBuild/tarball/content/repos/command-line-api.proj
+++ b/src/SourceBuild/tarball/content/repos/command-line-api.proj
@@ -4,6 +4,7 @@
$(StandardSourceBuildArgs)
+ $(BuildCommandArgs) $(FlagParameterPrefix)warnAsError $(ArcadeFalseBoolBuildArg)
$(BuildCommandArgs) $(FlagParameterPrefix)nodereuse $(ArcadeFalseBoolBuildArg)
$(StandardSourceBuildCommand) $(BuildCommandArgs)
diff --git a/src/SourceBuild/tarball/content/repos/fsharp.proj b/src/SourceBuild/tarball/content/repos/fsharp.proj
index 681ca5bf9..b77de5f30 100644
--- a/src/SourceBuild/tarball/content/repos/fsharp.proj
+++ b/src/SourceBuild/tarball/content/repos/fsharp.proj
@@ -2,7 +2,9 @@
- $(StandardSourceBuildCommand) $(StandardSourceBuildArgs)
+ $(StandardSourceBuildArgs)
+ $(BuildCommandArgs) /p:TreatWarningsAsErrors=false
+ $(StandardSourceBuildCommand) $(BuildCommandArgs)
false
$(ProjectDirectory)global.json
diff --git a/src/SourceBuild/tarball/content/repos/msbuild.proj b/src/SourceBuild/tarball/content/repos/msbuild.proj
index 1556dc627..3ed6a9eaf 100644
--- a/src/SourceBuild/tarball/content/repos/msbuild.proj
+++ b/src/SourceBuild/tarball/content/repos/msbuild.proj
@@ -9,6 +9,7 @@
$(StandardSourceBuildArgs)
$(BuildCommandArgs) $(FlagParameterPrefix)v $(LogVerbosity)
$(BuildCommandArgs) $(FlagParameterPrefix)nodereuse $(ArcadeFalseBoolBuildArg)
+ $(BuildCommandArgs) $(FlagParameterPrefix)warnAsError $(ArcadeFalseBoolBuildArg)
$(BuildCommandArgs) $(OutputVersionArgs)
$(BuildCommandArgs) /p:DotNetCoreSdkDir=$(DotNetCliToolDir)
$(StandardSourceBuildCommand) $(BuildCommandArgs)
diff --git a/src/SourceBuild/tarball/content/repos/roslyn.proj b/src/SourceBuild/tarball/content/repos/roslyn.proj
index a00875f49..7a8980227 100644
--- a/src/SourceBuild/tarball/content/repos/roslyn.proj
+++ b/src/SourceBuild/tarball/content/repos/roslyn.proj
@@ -3,6 +3,7 @@
$(StandardSourceBuildArgs)
+ $(BuildCommandArgs) /p:TreatWarningsAsErrors=false
$(BuildCommandArgs) /p:ApplyPartialNgenOptimization=false
$(BuildCommandArgs) /p:EnablePartialNgenOptimization=false
$(BuildCommandArgs) /p:PublishWindowsPdb=false
diff --git a/src/SourceBuild/tarball/content/tools-local/init-build.proj b/src/SourceBuild/tarball/content/tools-local/init-build.proj
index 90d7d604a..1e2d8f407 100644
--- a/src/SourceBuild/tarball/content/tools-local/init-build.proj
+++ b/src/SourceBuild/tarball/content/tools-local/init-build.proj
@@ -75,6 +75,7 @@
+
]]>
diff --git a/src/SourceBuild/tarball/content/tools-local/tasks/Microsoft.DotNet.SourceBuild.Tasks.XPlat/ReplaceTextInFile.cs b/src/SourceBuild/tarball/content/tools-local/tasks/Microsoft.DotNet.SourceBuild.Tasks.XPlat/ReplaceTextInFile.cs
index 32b8cdea1..dcd6b28f1 100644
--- a/src/SourceBuild/tarball/content/tools-local/tasks/Microsoft.DotNet.SourceBuild.Tasks.XPlat/ReplaceTextInFile.cs
+++ b/src/SourceBuild/tarball/content/tools-local/tasks/Microsoft.DotNet.SourceBuild.Tasks.XPlat/ReplaceTextInFile.cs
@@ -24,10 +24,11 @@ namespace Microsoft.DotNet.Build.Tasks
public override bool Execute()
{
string fileContents = File.ReadAllText(InputFile);
+ string newLineChars = FileUtilities.DetectNewLineChars(fileContents);
fileContents = fileContents.Replace(OldText, NewText);
- File.WriteAllText(InputFile, fileContents);
+ File.WriteAllText(InputFile, FileUtilities.NormalizeNewLineChars(fileContents, newLineChars));
return true;
}
diff --git a/src/SourceBuild/tarball/content/tools-local/tasks/Microsoft.DotNet.SourceBuild.Tasks.XPlat/ReplaceTextInFiles.cs b/src/SourceBuild/tarball/content/tools-local/tasks/Microsoft.DotNet.SourceBuild.Tasks.XPlat/ReplaceTextInFiles.cs
index 30f1d6e67..79dbf00fe 100644
--- a/src/SourceBuild/tarball/content/tools-local/tasks/Microsoft.DotNet.SourceBuild.Tasks.XPlat/ReplaceTextInFiles.cs
+++ b/src/SourceBuild/tarball/content/tools-local/tasks/Microsoft.DotNet.SourceBuild.Tasks.XPlat/ReplaceTextInFiles.cs
@@ -25,10 +25,11 @@ namespace Microsoft.DotNet.Build.Tasks
foreach (string file in InputFiles)
{
string fileContents = File.ReadAllText(file);
+ string newLineChars = FileUtilities.DetectNewLineChars(fileContents);
- fileContents = fileContents.Replace(OldText, NewText, StringComparison.Ordinal);
+ fileContents = fileContents.Replace(OldText, NewText);
- File.WriteAllText(file, fileContents);
+ File.WriteAllText(file, FileUtilities.NormalizeNewLineChars(fileContents, newLineChars));
}
return true;