Browse Source

Bump .NET Arcade to 10.0.0-beta.25562.108

pull/2406/head
Kévin Chalet 1 month ago
parent
commit
1ef7be587f
  1. 4
      .github/workflows/build.yml
  2. 12
      eng/Version.Details.xml
  3. 94
      eng/common/SetupNugetSources.ps1
  4. 190
      eng/common/SetupNugetSources.sh
  5. 11
      eng/common/build.ps1
  6. 33
      eng/common/build.sh
  7. 50
      eng/common/core-templates/job/job.yml
  8. 33
      eng/common/core-templates/job/onelocbuild.yml
  9. 88
      eng/common/core-templates/job/publish-build-assets.yml
  10. 7
      eng/common/core-templates/job/source-build.yml
  11. 47
      eng/common/core-templates/job/source-index-stage1.yml
  12. 3
      eng/common/core-templates/jobs/codeql-build.yml
  13. 19
      eng/common/core-templates/jobs/jobs.yml
  14. 18
      eng/common/core-templates/jobs/source-build.yml
  15. 28
      eng/common/core-templates/post-build/post-build.yml
  16. 2
      eng/common/core-templates/post-build/setup-maestro-vars.yml
  17. 28
      eng/common/core-templates/steps/cleanup-microbuild.yml
  18. 12
      eng/common/core-templates/steps/enable-internal-sources.yml
  19. 6
      eng/common/core-templates/steps/generate-sbom.yml
  20. 11
      eng/common/core-templates/steps/get-delegation-sas.yml
  21. 90
      eng/common/core-templates/steps/install-microbuild.yml
  22. 17
      eng/common/core-templates/steps/publish-logs.yml
  23. 82
      eng/common/core-templates/steps/source-build.yml
  24. 35
      eng/common/core-templates/steps/source-index-stage1-publish.yml
  25. 0
      eng/common/cross/arm/tizen-build-rootfs.sh
  26. 0
      eng/common/cross/arm/tizen-fetch.sh
  27. 2
      eng/common/cross/arm64/tizen/tizen.patch
  28. 49
      eng/common/cross/build-android-rootfs.sh
  29. 235
      eng/common/cross/build-rootfs.sh
  30. 334
      eng/common/cross/install-debs.py
  31. 0
      eng/common/cross/tizen-build-rootfs.sh
  32. 9
      eng/common/cross/tizen-fetch.sh
  33. 82
      eng/common/cross/toolchain.cmake
  34. 0
      eng/common/cross/x86/tizen-build-rootfs.sh
  35. 0
      eng/common/cross/x86/tizen-fetch.sh
  36. 2
      eng/common/darc-init.sh
  37. 7
      eng/common/dotnet.cmd
  38. 11
      eng/common/dotnet.ps1
  39. 26
      eng/common/dotnet.sh
  40. 49
      eng/common/generate-locproject.ps1
  41. 20
      eng/common/generate-sbom-prep.ps1
  42. 17
      eng/common/generate-sbom-prep.sh
  43. 3
      eng/common/internal/NuGet.config
  44. 10
      eng/common/internal/Tools.csproj
  45. 0
      eng/common/native/init-compiler.sh
  46. 0
      eng/common/native/init-distro-rid.sh
  47. 0
      eng/common/native/init-os-and-arch.sh
  48. 62
      eng/common/native/install-dependencies.sh
  49. 2
      eng/common/post-build/nuget-verification.ps1
  50. 12
      eng/common/post-build/publish-using-darc.ps1
  51. 12
      eng/common/sdk-task.ps1
  52. 121
      eng/common/sdk-task.sh
  53. 2
      eng/common/sdl/packages.config
  54. 4
      eng/common/template-guidance.md
  55. 7
      eng/common/templates-official/job/job.yml
  56. 7
      eng/common/templates-official/steps/publish-build-artifacts.yml
  57. 7
      eng/common/templates-official/steps/source-index-stage1-publish.yml
  58. 2
      eng/common/templates-official/variables/sdl-variables.yml
  59. 8
      eng/common/templates/job/job.yml
  60. 6
      eng/common/templates/steps/publish-build-artifacts.yml
  61. 7
      eng/common/templates/steps/source-index-stage1-publish.yml
  62. 207
      eng/common/templates/steps/vmr-sync.yml
  63. 42
      eng/common/templates/vmr-build-pr.yml
  64. 72
      eng/common/tools.ps1
  65. 77
      eng/common/tools.sh
  66. 138
      eng/common/vmr-sync.ps1
  67. 207
      eng/common/vmr-sync.sh
  68. 4
      global.json
  69. 2
      test/OpenIddict.Abstractions.Tests/OpenIddict.Abstractions.Tests.csproj
  70. 2
      test/OpenIddict.Client.AspNetCore.IntegrationTests/OpenIddict.Client.AspNetCore.IntegrationTests.csproj
  71. 2
      test/OpenIddict.Client.IntegrationTests/OpenIddict.Client.IntegrationTests.csproj
  72. 2
      test/OpenIddict.Client.Owin.IntegrationTests/OpenIddict.Client.Owin.IntegrationTests.csproj
  73. 2
      test/OpenIddict.Core.Tests/OpenIddict.Core.Tests.csproj
  74. 2
      test/OpenIddict.EntityFramework.Tests/OpenIddict.EntityFramework.Tests.csproj
  75. 2
      test/OpenIddict.EntityFrameworkCore.Tests/OpenIddict.EntityFrameworkCore.Tests.csproj
  76. 2
      test/OpenIddict.Quartz.Tests/OpenIddict.Quartz.Tests.csproj
  77. 2
      test/OpenIddict.Server.AspNetCore.IntegrationTests/OpenIddict.Server.AspNetCore.IntegrationTests.csproj
  78. 2
      test/OpenIddict.Server.DataProtection.Tests/OpenIddict.Server.DataProtection.Tests.csproj
  79. 2
      test/OpenIddict.Server.IntegrationTests/OpenIddict.Server.IntegrationTests.csproj
  80. 2
      test/OpenIddict.Server.Owin.IntegrationTests/OpenIddict.Server.Owin.IntegrationTests.csproj
  81. 2
      test/OpenIddict.Server.Tests/OpenIddict.Server.Tests.csproj
  82. 2
      test/OpenIddict.Validation.AspNetCore.IntegrationTests/OpenIddict.Validation.AspNetCore.IntegrationTests.csproj
  83. 2
      test/OpenIddict.Validation.IntegrationTests/OpenIddict.Validation.IntegrationTests.csproj
  84. 2
      test/OpenIddict.Validation.Owin.IntegrationTests/OpenIddict.Validation.Owin.IntegrationTests.csproj

4
.github/workflows/build.yml

@ -70,12 +70,12 @@ jobs:
- name: Build, test and pack - name: Build, test and pack
if: ${{ runner.os == 'Windows' }} if: ${{ runner.os == 'Windows' }}
run: eng\common\CIBuild.cmd -configuration Release -prepareMachine -integrationTest /p:RestoreDotNetWorkloads=true run: eng\common\Build.cmd -configuration Release -ci -prepareMachine -restore -build -test -sign -pack -integrationTest /p:RestoreDotNetWorkloads=true
- name: Build, test and pack - name: Build, test and pack
if: ${{ runner.os != 'Windows' }} if: ${{ runner.os != 'Windows' }}
shell: pwsh shell: pwsh
run: ./eng/common/cibuild.sh -configuration Release -prepareMachine -integrationTest /p:RestoreDotNetWorkloads=true run: ./eng/common/build.sh -configuration Release -ci -prepareMachine -restore -build -test -sign -pack -integrationTest /p:RestoreDotNetWorkloads=true
- name: Attest artifacts - name: Attest artifacts
uses: actions/attest-build-provenance@bdd51370e0416ac948727f861e03c2f05d32d78e # v1.3.2 uses: actions/attest-build-provenance@bdd51370e0416ac948727f861e03c2f05d32d78e # v1.3.2

12
eng/Version.Details.xml

@ -5,14 +5,14 @@
</ProductDependencies> </ProductDependencies>
<ToolsetDependencies> <ToolsetDependencies>
<Dependency Name="Microsoft.DotNet.Arcade.Sdk" Version="9.0.0-beta.25058.5"> <Dependency Name="Microsoft.DotNet.Arcade.Sdk" Version="10.0.0-beta.25562.108">
<Uri>https://github.com/dotnet/arcade</Uri> <Uri>https://github.com/dotnet/dotnet</Uri>
<Sha>8cc6ecd76c24ef6665579a5c5e386a211a1e7c54</Sha> <Sha>f4701e048e6a684237a4b52b745e21b1d857278d</Sha>
</Dependency> </Dependency>
<Dependency Name="Microsoft.DotNet.Helix.Sdk" Version="9.0.0-beta.25058.5"> <Dependency Name="Microsoft.DotNet.Helix.Sdk" Version="10.0.0-beta.25562.108">
<Uri>https://github.com/dotnet/arcade</Uri> <Uri>https://github.com/dotnet/dotnet</Uri>
<Sha>8cc6ecd76c24ef6665579a5c5e386a211a1e7c54</Sha> <Sha>f4701e048e6a684237a4b52b745e21b1d857278d</Sha>
</Dependency> </Dependency>
</ToolsetDependencies> </ToolsetDependencies>

94
eng/common/SetupNugetSources.ps1

@ -1,17 +1,18 @@
# This script adds internal feeds required to build commits that depend on internal package sources. For instance, # This script adds internal feeds required to build commits that depend on internal package sources. For instance,
# dotnet6-internal would be added automatically if dotnet6 was found in the nuget.config file. In addition also enables # dotnet6-internal would be added automatically if dotnet6 was found in the nuget.config file. Similarly,
# disabled internal Maestro (darc-int*) feeds. # dotnet-eng-internal and dotnet-tools-internal are added if dotnet-eng and dotnet-tools are present.
# In addition, this script also enables disabled internal Maestro (darc-int*) feeds.
# #
# Optionally, this script also adds a credential entry for each of the internal feeds if supplied. # Optionally, this script also adds a credential entry for each of the internal feeds if supplied.
# #
# See example call for this script below. # See example call for this script below.
# #
# - task: PowerShell@2 # - task: PowerShell@2
# displayName: Setup Private Feeds Credentials # displayName: Setup internal Feeds Credentials
# condition: eq(variables['Agent.OS'], 'Windows_NT') # condition: eq(variables['Agent.OS'], 'Windows_NT')
# inputs: # inputs:
# filePath: $(Build.SourcesDirectory)/eng/common/SetupNugetSources.ps1 # filePath: $(System.DefaultWorkingDirectory)/eng/common/SetupNugetSources.ps1
# arguments: -ConfigFile $(Build.SourcesDirectory)/NuGet.config -Password $Env:Token # arguments: -ConfigFile $(System.DefaultWorkingDirectory)/NuGet.config -Password $Env:Token
# env: # env:
# Token: $(dn-bot-dnceng-artifact-feeds-rw) # Token: $(dn-bot-dnceng-artifact-feeds-rw)
# #
@ -34,19 +35,28 @@ Set-StrictMode -Version 2.0
. $PSScriptRoot\tools.ps1 . $PSScriptRoot\tools.ps1
# Adds or enables the package source with the given name
function AddOrEnablePackageSource($sources, $disabledPackageSources, $SourceName, $SourceEndPoint, $creds, $Username, $pwd) {
if ($disabledPackageSources -eq $null -or -not (EnableInternalPackageSource -DisabledPackageSources $disabledPackageSources -Creds $creds -PackageSourceName $SourceName)) {
AddPackageSource -Sources $sources -SourceName $SourceName -SourceEndPoint $SourceEndPoint -Creds $creds -Username $userName -pwd $Password
}
}
# Add source entry to PackageSources # Add source entry to PackageSources
function AddPackageSource($sources, $SourceName, $SourceEndPoint, $creds, $Username, $pwd) { function AddPackageSource($sources, $SourceName, $SourceEndPoint, $creds, $Username, $pwd) {
$packageSource = $sources.SelectSingleNode("add[@key='$SourceName']") $packageSource = $sources.SelectSingleNode("add[@key='$SourceName']")
if ($packageSource -eq $null) if ($packageSource -eq $null)
{ {
Write-Host "Adding package source $SourceName"
$packageSource = $doc.CreateElement("add") $packageSource = $doc.CreateElement("add")
$packageSource.SetAttribute("key", $SourceName) $packageSource.SetAttribute("key", $SourceName)
$packageSource.SetAttribute("value", $SourceEndPoint) $packageSource.SetAttribute("value", $SourceEndPoint)
$sources.AppendChild($packageSource) | Out-Null $sources.AppendChild($packageSource) | Out-Null
} }
else { else {
Write-Host "Package source $SourceName already present." Write-Host "Package source $SourceName already present and enabled."
} }
AddCredential -Creds $creds -Source $SourceName -Username $Username -pwd $pwd AddCredential -Creds $creds -Source $SourceName -Username $Username -pwd $pwd
@ -59,6 +69,8 @@ function AddCredential($creds, $source, $username, $pwd) {
return; return;
} }
Write-Host "Inserting credential for feed: " $source
# Looks for credential configuration for the given SourceName. Create it if none is found. # Looks for credential configuration for the given SourceName. Create it if none is found.
$sourceElement = $creds.SelectSingleNode($Source) $sourceElement = $creds.SelectSingleNode($Source)
if ($sourceElement -eq $null) if ($sourceElement -eq $null)
@ -91,24 +103,27 @@ function AddCredential($creds, $source, $username, $pwd) {
$passwordElement.SetAttribute("value", $pwd) $passwordElement.SetAttribute("value", $pwd)
} }
function InsertMaestroPrivateFeedCredentials($Sources, $Creds, $Username, $pwd) { # Enable all darc-int package sources.
$maestroPrivateSources = $Sources.SelectNodes("add[contains(@key,'darc-int')]") function EnableMaestroInternalPackageSources($DisabledPackageSources, $Creds) {
$maestroInternalSources = $DisabledPackageSources.SelectNodes("add[contains(@key,'darc-int')]")
Write-Host "Inserting credentials for $($maestroPrivateSources.Count) Maestro's private feeds." ForEach ($DisabledPackageSource in $maestroInternalSources) {
EnableInternalPackageSource -DisabledPackageSources $DisabledPackageSources -Creds $Creds -PackageSourceName $DisabledPackageSource.key
ForEach ($PackageSource in $maestroPrivateSources) {
Write-Host "`tInserting credential for Maestro's feed:" $PackageSource.Key
AddCredential -Creds $creds -Source $PackageSource.Key -Username $Username -pwd $pwd
} }
} }
function EnablePrivatePackageSources($DisabledPackageSources) { # Enables an internal package source by name, if found. Returns true if the package source was found and enabled, false otherwise.
$maestroPrivateSources = $DisabledPackageSources.SelectNodes("add[contains(@key,'darc-int')]") function EnableInternalPackageSource($DisabledPackageSources, $Creds, $PackageSourceName) {
ForEach ($DisabledPackageSource in $maestroPrivateSources) { $DisabledPackageSource = $DisabledPackageSources.SelectSingleNode("add[@key='$PackageSourceName']")
Write-Host "`tEnsuring private source '$($DisabledPackageSource.key)' is enabled by deleting it from disabledPackageSource" if ($DisabledPackageSource) {
Write-Host "Enabling internal source '$($DisabledPackageSource.key)'."
# Due to https://github.com/NuGet/Home/issues/10291, we must actually remove the disabled entries # Due to https://github.com/NuGet/Home/issues/10291, we must actually remove the disabled entries
$DisabledPackageSources.RemoveChild($DisabledPackageSource) $DisabledPackageSources.RemoveChild($DisabledPackageSource)
AddCredential -Creds $creds -Source $DisabledPackageSource.Key -Username $userName -pwd $Password
return $true
} }
return $false
} }
if (!(Test-Path $ConfigFile -PathType Leaf)) { if (!(Test-Path $ConfigFile -PathType Leaf)) {
@ -121,15 +136,17 @@ $doc = New-Object System.Xml.XmlDocument
$filename = (Get-Item $ConfigFile).FullName $filename = (Get-Item $ConfigFile).FullName
$doc.Load($filename) $doc.Load($filename)
# Get reference to <PackageSources> or create one if none exist already # Get reference to <PackageSources> - fail if none exist
$sources = $doc.DocumentElement.SelectSingleNode("packageSources") $sources = $doc.DocumentElement.SelectSingleNode("packageSources")
if ($sources -eq $null) { if ($sources -eq $null) {
$sources = $doc.CreateElement("packageSources") Write-PipelineTelemetryError -Category 'Build' -Message "Eng/common/SetupNugetSources.ps1 returned a non-zero exit code. NuGet config file must contain a packageSources section: $ConfigFile"
$doc.DocumentElement.AppendChild($sources) | Out-Null ExitWithExitCode 1
} }
$creds = $null $creds = $null
$feedSuffix = "v3/index.json"
if ($Password) { if ($Password) {
$feedSuffix = "v2"
# Looks for a <PackageSourceCredentials> node. Create it if none is found. # Looks for a <PackageSourceCredentials> node. Create it if none is found.
$creds = $doc.DocumentElement.SelectSingleNode("packageSourceCredentials") $creds = $doc.DocumentElement.SelectSingleNode("packageSourceCredentials")
if ($creds -eq $null) { if ($creds -eq $null) {
@ -138,34 +155,35 @@ if ($Password) {
} }
} }
$userName = "dn-bot"
# Check for disabledPackageSources; we'll enable any darc-int ones we find there # Check for disabledPackageSources; we'll enable any darc-int ones we find there
$disabledSources = $doc.DocumentElement.SelectSingleNode("disabledPackageSources") $disabledSources = $doc.DocumentElement.SelectSingleNode("disabledPackageSources")
if ($disabledSources -ne $null) { if ($disabledSources -ne $null) {
Write-Host "Checking for any darc-int disabled package sources in the disabledPackageSources node" Write-Host "Checking for any darc-int disabled package sources in the disabledPackageSources node"
EnablePrivatePackageSources -DisabledPackageSources $disabledSources EnableMaestroInternalPackageSources -DisabledPackageSources $disabledSources -Creds $creds
} }
$dotnetVersions = @('5','6','7','8','9','10')
$userName = "dn-bot"
# Insert credential nodes for Maestro's private feeds
InsertMaestroPrivateFeedCredentials -Sources $sources -Creds $creds -Username $userName -pwd $Password
# 3.1 uses a different feed url format so it's handled differently here
$dotnet31Source = $sources.SelectSingleNode("add[@key='dotnet3.1']")
if ($dotnet31Source -ne $null) {
AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal/nuget/v2" -Creds $creds -Username $userName -pwd $Password
AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v2" -Creds $creds -Username $userName -pwd $Password
}
$dotnetVersions = @('5','6','7','8','9')
foreach ($dotnetVersion in $dotnetVersions) { foreach ($dotnetVersion in $dotnetVersions) {
$feedPrefix = "dotnet" + $dotnetVersion; $feedPrefix = "dotnet" + $dotnetVersion;
$dotnetSource = $sources.SelectSingleNode("add[@key='$feedPrefix']") $dotnetSource = $sources.SelectSingleNode("add[@key='$feedPrefix']")
if ($dotnetSource -ne $null) { if ($dotnetSource -ne $null) {
AddPackageSource -Sources $sources -SourceName "$feedPrefix-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$feedPrefix-internal/nuget/v2" -Creds $creds -Username $userName -pwd $Password AddOrEnablePackageSource -Sources $sources -DisabledPackageSources $disabledSources -SourceName "$feedPrefix-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$feedPrefix-internal/nuget/$feedSuffix" -Creds $creds -Username $userName -pwd $Password
AddPackageSource -Sources $sources -SourceName "$feedPrefix-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$feedPrefix-internal-transport/nuget/v2" -Creds $creds -Username $userName -pwd $Password AddOrEnablePackageSource -Sources $sources -DisabledPackageSources $disabledSources -SourceName "$feedPrefix-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$feedPrefix-internal-transport/nuget/$feedSuffix" -Creds $creds -Username $userName -pwd $Password
} }
} }
# Check for dotnet-eng and add dotnet-eng-internal if present
$dotnetEngSource = $sources.SelectSingleNode("add[@key='dotnet-eng']")
if ($dotnetEngSource -ne $null) {
AddOrEnablePackageSource -Sources $sources -DisabledPackageSources $disabledSources -SourceName "dotnet-eng-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet-eng-internal/nuget/$feedSuffix" -Creds $creds -Username $userName -pwd $Password
}
# Check for dotnet-tools and add dotnet-tools-internal if present
$dotnetToolsSource = $sources.SelectSingleNode("add[@key='dotnet-tools']")
if ($dotnetToolsSource -ne $null) {
AddOrEnablePackageSource -Sources $sources -DisabledPackageSources $disabledSources -SourceName "dotnet-tools-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet-tools-internal/nuget/$feedSuffix" -Creds $creds -Username $userName -pwd $Password
}
$doc.Save($filename) $doc.Save($filename)

190
eng/common/SetupNugetSources.sh

@ -1,8 +1,9 @@
#!/usr/bin/env bash #!/usr/bin/env bash
# This script adds internal feeds required to build commits that depend on internal package sources. For instance, # This script adds internal feeds required to build commits that depend on internal package sources. For instance,
# dotnet6-internal would be added automatically if dotnet6 was found in the nuget.config file. In addition also enables # dotnet6-internal would be added automatically if dotnet6 was found in the nuget.config file. Similarly,
# disabled internal Maestro (darc-int*) feeds. # dotnet-eng-internal and dotnet-tools-internal are added if dotnet-eng and dotnet-tools are present.
# In addition, this script also enables disabled internal Maestro (darc-int*) feeds.
# #
# Optionally, this script also adds a credential entry for each of the internal feeds if supplied. # Optionally, this script also adds a credential entry for each of the internal feeds if supplied.
# #
@ -11,8 +12,8 @@
# - task: Bash@3 # - task: Bash@3
# displayName: Setup Internal Feeds # displayName: Setup Internal Feeds
# inputs: # inputs:
# filePath: $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh # filePath: $(System.DefaultWorkingDirectory)/eng/common/SetupNugetSources.sh
# arguments: $(Build.SourcesDirectory)/NuGet.config # arguments: $(System.DefaultWorkingDirectory)/NuGet.config
# condition: ne(variables['Agent.OS'], 'Windows_NT') # condition: ne(variables['Agent.OS'], 'Windows_NT')
# - task: NuGetAuthenticate@1 # - task: NuGetAuthenticate@1
# #
@ -52,81 +53,139 @@ if [[ `uname -s` == "Darwin" ]]; then
TB='' TB=''
fi fi
# Ensure there is a <packageSources>...</packageSources> section. # Enables an internal package source by name, if found. Returns 0 if found and enabled, 1 if not found.
grep -i "<packageSources>" $ConfigFile EnableInternalPackageSource() {
if [ "$?" != "0" ]; then local PackageSourceName="$1"
echo "Adding <packageSources>...</packageSources> section."
ConfigNodeHeader="<configuration>"
PackageSourcesTemplate="${TB}<packageSources>${NL}${TB}</packageSources>"
sed -i.bak "s|$ConfigNodeHeader|$ConfigNodeHeader${NL}$PackageSourcesTemplate|" $ConfigFile # Check if disabledPackageSources section exists
fi grep -i "<disabledPackageSources>" "$ConfigFile" > /dev/null
if [ "$?" != "0" ]; then
return 1 # No disabled sources section
fi
# Ensure there is a <packageSourceCredentials>...</packageSourceCredentials> section. # Check if this source name is disabled
grep -i "<packageSourceCredentials>" $ConfigFile grep -i "<add key=\"$PackageSourceName\" value=\"true\"" "$ConfigFile" > /dev/null
if [ "$?" != "0" ]; then if [ "$?" == "0" ]; then
echo "Adding <packageSourceCredentials>...</packageSourceCredentials> section." echo "Enabling internal source '$PackageSourceName'."
# Remove the disabled entry (including any surrounding comments or whitespace on the same line)
sed -i.bak "/<add key=\"$PackageSourceName\" value=\"true\" \/>/d" "$ConfigFile"
# Add the source name to PackageSources for credential handling
PackageSources+=("$PackageSourceName")
return 0 # Found and enabled
fi
return 1 # Not found in disabled sources
}
# Add source entry to PackageSources
AddPackageSource() {
local SourceName="$1"
local SourceEndPoint="$2"
# Check if source already exists
grep -i "<add key=\"$SourceName\"" "$ConfigFile" > /dev/null
if [ "$?" == "0" ]; then
echo "Package source $SourceName already present and enabled."
PackageSources+=("$SourceName")
return
fi
echo "Adding package source $SourceName"
PackageSourcesNodeFooter="</packageSources>" PackageSourcesNodeFooter="</packageSources>"
PackageSourceCredentialsTemplate="${TB}<packageSourceCredentials>${NL}${TB}</packageSourceCredentials>" PackageSourceTemplate="${TB}<add key=\"$SourceName\" value=\"$SourceEndPoint\" />"
sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourcesNodeFooter${NL}$PackageSourceCredentialsTemplate|" $ConfigFile sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" "$ConfigFile"
fi PackageSources+=("$SourceName")
}
PackageSources=() # Adds or enables the package source with the given name
AddOrEnablePackageSource() {
local SourceName="$1"
local SourceEndPoint="$2"
# Ensure dotnet3.1-internal and dotnet3.1-internal-transport are in the packageSources if the public dotnet3.1 feeds are present # Try to enable if disabled, if not found then add new source
grep -i "<add key=\"dotnet3.1\"" $ConfigFile EnableInternalPackageSource "$SourceName"
if [ "$?" == "0" ]; then
grep -i "<add key=\"dotnet3.1-internal\"" $ConfigFile
if [ "$?" != "0" ]; then if [ "$?" != "0" ]; then
echo "Adding dotnet3.1-internal to the packageSources." AddPackageSource "$SourceName" "$SourceEndPoint"
PackageSourcesNodeFooter="</packageSources>" fi
PackageSourceTemplate="${TB}<add key=\"dotnet3.1-internal\" value=\"https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal/nuget/v2\" />" }
sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile # Enable all darc-int package sources
EnableMaestroInternalPackageSources() {
# Check if disabledPackageSources section exists
grep -i "<disabledPackageSources>" "$ConfigFile" > /dev/null
if [ "$?" != "0" ]; then
return # No disabled sources section
fi fi
PackageSources+=('dotnet3.1-internal')
grep -i "<add key=\"dotnet3.1-internal-transport\">" $ConfigFile # Find all darc-int disabled sources
local DisabledDarcIntSources=()
DisabledDarcIntSources+=$(grep -oh '"darc-int-[^"]*" value="true"' "$ConfigFile" | tr -d '"')
for DisabledSourceName in ${DisabledDarcIntSources[@]} ; do
if [[ $DisabledSourceName == darc-int* ]]; then
EnableInternalPackageSource "$DisabledSourceName"
fi
done
}
# Ensure there is a <packageSources>...</packageSources> section.
grep -i "<packageSources>" $ConfigFile
if [ "$?" != "0" ]; then
Write-PipelineTelemetryError -Category 'Build' "Error: Eng/common/SetupNugetSources.sh returned a non-zero exit code. NuGet config file must contain a packageSources section: $ConfigFile"
ExitWithExitCode 1
fi
PackageSources=()
# Set feed suffix based on whether credentials are provided
FeedSuffix="v3/index.json"
if [ -n "$CredToken" ]; then
FeedSuffix="v2"
# Ensure there is a <packageSourceCredentials>...</packageSourceCredentials> section.
grep -i "<packageSourceCredentials>" $ConfigFile
if [ "$?" != "0" ]; then if [ "$?" != "0" ]; then
echo "Adding dotnet3.1-internal-transport to the packageSources." echo "Adding <packageSourceCredentials>...</packageSourceCredentials> section."
PackageSourcesNodeFooter="</packageSources>" PackageSourcesNodeFooter="</packageSources>"
PackageSourceTemplate="${TB}<add key=\"dotnet3.1-internal-transport\" value=\"https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v2\" />" PackageSourceCredentialsTemplate="${TB}<packageSourceCredentials>${NL}${TB}</packageSourceCredentials>"
sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourcesNodeFooter${NL}$PackageSourceCredentialsTemplate|" $ConfigFile
fi fi
PackageSources+=('dotnet3.1-internal-transport')
fi fi
DotNetVersions=('5' '6' '7' '8' '9') # Check for disabledPackageSources; we'll enable any darc-int ones we find there
grep -i "<disabledPackageSources>" $ConfigFile > /dev/null
if [ "$?" == "0" ]; then
echo "Checking for any darc-int disabled package sources in the disabledPackageSources node"
EnableMaestroInternalPackageSources
fi
DotNetVersions=('5' '6' '7' '8' '9' '10')
for DotNetVersion in ${DotNetVersions[@]} ; do for DotNetVersion in ${DotNetVersions[@]} ; do
FeedPrefix="dotnet${DotNetVersion}"; FeedPrefix="dotnet${DotNetVersion}";
grep -i "<add key=\"$FeedPrefix\"" $ConfigFile grep -i "<add key=\"$FeedPrefix\"" $ConfigFile > /dev/null
if [ "$?" == "0" ]; then if [ "$?" == "0" ]; then
grep -i "<add key=\"$FeedPrefix-internal\"" $ConfigFile AddOrEnablePackageSource "$FeedPrefix-internal" "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$FeedPrefix-internal/nuget/$FeedSuffix"
if [ "$?" != "0" ]; then AddOrEnablePackageSource "$FeedPrefix-internal-transport" "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$FeedPrefix-internal-transport/nuget/$FeedSuffix"
echo "Adding $FeedPrefix-internal to the packageSources."
PackageSourcesNodeFooter="</packageSources>"
PackageSourceTemplate="${TB}<add key=\"$FeedPrefix-internal\" value=\"https://pkgs.dev.azure.com/dnceng/internal/_packaging/$FeedPrefix-internal/nuget/v2\" />"
sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile
fi
PackageSources+=("$FeedPrefix-internal")
grep -i "<add key=\"$FeedPrefix-internal-transport\">" $ConfigFile
if [ "$?" != "0" ]; then
echo "Adding $FeedPrefix-internal-transport to the packageSources."
PackageSourcesNodeFooter="</packageSources>"
PackageSourceTemplate="${TB}<add key=\"$FeedPrefix-internal-transport\" value=\"https://pkgs.dev.azure.com/dnceng/internal/_packaging/$FeedPrefix-internal-transport/nuget/v2\" />"
sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile
fi
PackageSources+=("$FeedPrefix-internal-transport")
fi fi
done done
# Check for dotnet-eng and add dotnet-eng-internal if present
grep -i "<add key=\"dotnet-eng\"" $ConfigFile > /dev/null
if [ "$?" == "0" ]; then
AddOrEnablePackageSource "dotnet-eng-internal" "https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet-eng-internal/nuget/$FeedSuffix"
fi
# Check for dotnet-tools and add dotnet-tools-internal if present
grep -i "<add key=\"dotnet-tools\"" $ConfigFile > /dev/null
if [ "$?" == "0" ]; then
AddOrEnablePackageSource "dotnet-tools-internal" "https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet-tools-internal/nuget/$FeedSuffix"
fi
# I want things split line by line # I want things split line by line
PrevIFS=$IFS PrevIFS=$IFS
IFS=$'\n' IFS=$'\n'
@ -139,29 +198,12 @@ if [ "$CredToken" ]; then
# Check if there is no existing credential for this FeedName # Check if there is no existing credential for this FeedName
grep -i "<$FeedName>" $ConfigFile grep -i "<$FeedName>" $ConfigFile
if [ "$?" != "0" ]; then if [ "$?" != "0" ]; then
echo "Adding credentials for $FeedName." echo " Inserting credential for feed: $FeedName"
PackageSourceCredentialsNodeFooter="</packageSourceCredentials>" PackageSourceCredentialsNodeFooter="</packageSourceCredentials>"
NewCredential="${TB}${TB}<$FeedName>${NL}<add key=\"Username\" value=\"dn-bot\" />${NL}<add key=\"ClearTextPassword\" value=\"$CredToken\" />${NL}</$FeedName>" NewCredential="${TB}${TB}<$FeedName>${NL}${TB}<add key=\"Username\" value=\"dn-bot\" />${NL}${TB}${TB}<add key=\"ClearTextPassword\" value=\"$CredToken\" />${NL}${TB}${TB}</$FeedName>"
sed -i.bak "s|$PackageSourceCredentialsNodeFooter|$NewCredential${NL}$PackageSourceCredentialsNodeFooter|" $ConfigFile sed -i.bak "s|$PackageSourceCredentialsNodeFooter|$NewCredential${NL}$PackageSourceCredentialsNodeFooter|" $ConfigFile
fi fi
done done
fi fi
# Re-enable any entries in disabledPackageSources where the feed name contains darc-int
grep -i "<disabledPackageSources>" $ConfigFile
if [ "$?" == "0" ]; then
DisabledDarcIntSources=()
echo "Re-enabling any disabled \"darc-int\" package sources in $ConfigFile"
DisabledDarcIntSources+=$(grep -oh '"darc-int-[^"]*" value="true"' $ConfigFile | tr -d '"')
for DisabledSourceName in ${DisabledDarcIntSources[@]} ; do
if [[ $DisabledSourceName == darc-int* ]]
then
OldDisableValue="<add key=\"$DisabledSourceName\" value=\"true\" />"
NewDisableValue="<!-- Reenabled for build : $DisabledSourceName -->"
sed -i.bak "s|$OldDisableValue|$NewDisableValue|" $ConfigFile
echo "Neutralized disablePackageSources entry for '$DisabledSourceName'"
fi
done
fi

11
eng/common/build.ps1

@ -7,6 +7,7 @@ Param(
[string] $msbuildEngine = $null, [string] $msbuildEngine = $null,
[bool] $warnAsError = $true, [bool] $warnAsError = $true,
[bool] $nodeReuse = $true, [bool] $nodeReuse = $true,
[switch] $buildCheck = $false,
[switch][Alias('r')]$restore, [switch][Alias('r')]$restore,
[switch] $deployDeps, [switch] $deployDeps,
[switch][Alias('b')]$build, [switch][Alias('b')]$build,
@ -20,6 +21,7 @@ Param(
[switch] $publish, [switch] $publish,
[switch] $clean, [switch] $clean,
[switch][Alias('pb')]$productBuild, [switch][Alias('pb')]$productBuild,
[switch]$fromVMR,
[switch][Alias('bl')]$binaryLog, [switch][Alias('bl')]$binaryLog,
[switch][Alias('nobl')]$excludeCIBinarylog, [switch][Alias('nobl')]$excludeCIBinarylog,
[switch] $ci, [switch] $ci,
@ -71,6 +73,9 @@ function Print-Usage() {
Write-Host " -msbuildEngine <value> Msbuild engine to use to run build ('dotnet', 'vs', or unspecified)." Write-Host " -msbuildEngine <value> Msbuild engine to use to run build ('dotnet', 'vs', or unspecified)."
Write-Host " -excludePrereleaseVS Set to exclude build engines in prerelease versions of Visual Studio" Write-Host " -excludePrereleaseVS Set to exclude build engines in prerelease versions of Visual Studio"
Write-Host " -nativeToolsOnMachine Sets the native tools on machine environment variable (indicating that the script should use native tools on machine)" Write-Host " -nativeToolsOnMachine Sets the native tools on machine environment variable (indicating that the script should use native tools on machine)"
Write-Host " -nodeReuse <value> Sets nodereuse msbuild parameter ('true' or 'false')"
Write-Host " -buildCheck Sets /check msbuild parameter"
Write-Host " -fromVMR Set when building from within the VMR"
Write-Host "" Write-Host ""
Write-Host "Command line arguments not listed above are passed thru to msbuild." Write-Host "Command line arguments not listed above are passed thru to msbuild."
@ -97,6 +102,7 @@ function Build {
$bl = if ($binaryLog) { '/bl:' + (Join-Path $LogDir 'Build.binlog') } else { '' } $bl = if ($binaryLog) { '/bl:' + (Join-Path $LogDir 'Build.binlog') } else { '' }
$platformArg = if ($platform) { "/p:Platform=$platform" } else { '' } $platformArg = if ($platform) { "/p:Platform=$platform" } else { '' }
$check = if ($buildCheck) { '/check' } else { '' }
if ($projects) { if ($projects) {
# Re-assign properties to a new variable because PowerShell doesn't let us append properties directly for unclear reasons. # Re-assign properties to a new variable because PowerShell doesn't let us append properties directly for unclear reasons.
@ -113,6 +119,7 @@ function Build {
MSBuild $toolsetBuildProj ` MSBuild $toolsetBuildProj `
$bl ` $bl `
$platformArg ` $platformArg `
$check `
/p:Configuration=$configuration ` /p:Configuration=$configuration `
/p:RepoRoot=$RepoRoot ` /p:RepoRoot=$RepoRoot `
/p:Restore=$restore ` /p:Restore=$restore `
@ -122,11 +129,13 @@ function Build {
/p:Deploy=$deploy ` /p:Deploy=$deploy `
/p:Test=$test ` /p:Test=$test `
/p:Pack=$pack ` /p:Pack=$pack `
/p:DotNetBuildRepo=$productBuild ` /p:DotNetBuild=$productBuild `
/p:DotNetBuildFromVMR=$fromVMR `
/p:IntegrationTest=$integrationTest ` /p:IntegrationTest=$integrationTest `
/p:PerformanceTest=$performanceTest ` /p:PerformanceTest=$performanceTest `
/p:Sign=$sign ` /p:Sign=$sign `
/p:Publish=$publish ` /p:Publish=$publish `
/p:RestoreStaticGraphEnableBinaryLogger=$binaryLog `
@properties @properties
} }

33
eng/common/build.sh

@ -42,6 +42,8 @@ usage()
echo " --prepareMachine Prepare machine for CI run, clean up processes after build" echo " --prepareMachine Prepare machine for CI run, clean up processes after build"
echo " --nodeReuse <value> Sets nodereuse msbuild parameter ('true' or 'false')" echo " --nodeReuse <value> Sets nodereuse msbuild parameter ('true' or 'false')"
echo " --warnAsError <value> Sets warnaserror msbuild parameter ('true' or 'false')" echo " --warnAsError <value> Sets warnaserror msbuild parameter ('true' or 'false')"
echo " --buildCheck <value> Sets /check msbuild parameter"
echo " --fromVMR Set when building from within the VMR"
echo "" echo ""
echo "Command line arguments not listed above are passed thru to msbuild." echo "Command line arguments not listed above are passed thru to msbuild."
echo "Arguments can also be passed in with a single hyphen." echo "Arguments can also be passed in with a single hyphen."
@ -63,6 +65,7 @@ restore=false
build=false build=false
source_build=false source_build=false
product_build=false product_build=false
from_vmr=false
rebuild=false rebuild=false
test=false test=false
integration_test=false integration_test=false
@ -76,6 +79,7 @@ clean=false
warn_as_error=true warn_as_error=true
node_reuse=true node_reuse=true
build_check=false
binary_log=false binary_log=false
exclude_ci_binary_log=false exclude_ci_binary_log=false
pipelines_log=false pipelines_log=false
@ -87,7 +91,7 @@ verbosity='minimal'
runtime_source_feed='' runtime_source_feed=''
runtime_source_feed_key='' runtime_source_feed_key=''
properties='' properties=()
while [[ $# > 0 ]]; do while [[ $# > 0 ]]; do
opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")" opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")"
case "$opt" in case "$opt" in
@ -127,19 +131,22 @@ while [[ $# > 0 ]]; do
-pack) -pack)
pack=true pack=true
;; ;;
-sourcebuild|-sb) -sourcebuild|-source-build|-sb)
build=true build=true
source_build=true source_build=true
product_build=true product_build=true
restore=true restore=true
pack=true pack=true
;; ;;
-productBuild|-pb) -productbuild|-product-build|-pb)
build=true build=true
product_build=true product_build=true
restore=true restore=true
pack=true pack=true
;; ;;
-fromvmr|-from-vmr)
from_vmr=true
;;
-test|-t) -test|-t)
test=true test=true
;; ;;
@ -173,6 +180,9 @@ while [[ $# > 0 ]]; do
node_reuse=$2 node_reuse=$2
shift shift
;; ;;
-buildcheck)
build_check=true
;;
-runtimesourcefeed) -runtimesourcefeed)
runtime_source_feed=$2 runtime_source_feed=$2
shift shift
@ -182,7 +192,7 @@ while [[ $# > 0 ]]; do
shift shift
;; ;;
*) *)
properties="$properties $1" properties+=("$1")
;; ;;
esac esac
@ -216,7 +226,7 @@ function Build {
InitializeCustomToolset InitializeCustomToolset
if [[ ! -z "$projects" ]]; then if [[ ! -z "$projects" ]]; then
properties="$properties /p:Projects=$projects" properties+=("/p:Projects=$projects")
fi fi
local bl="" local bl=""
@ -224,15 +234,21 @@ function Build {
bl="/bl:\"$log_dir/Build.binlog\"" bl="/bl:\"$log_dir/Build.binlog\""
fi fi
local check=""
if [[ "$build_check" == true ]]; then
check="/check"
fi
MSBuild $_InitializeToolset \ MSBuild $_InitializeToolset \
$bl \ $bl \
$check \
/p:Configuration=$configuration \ /p:Configuration=$configuration \
/p:RepoRoot="$repo_root" \ /p:RepoRoot="$repo_root" \
/p:Restore=$restore \ /p:Restore=$restore \
/p:Build=$build \ /p:Build=$build \
/p:DotNetBuildRepo=$product_build \ /p:DotNetBuild=$product_build \
/p:ArcadeBuildFromSource=$source_build \
/p:DotNetBuildSourceOnly=$source_build \ /p:DotNetBuildSourceOnly=$source_build \
/p:DotNetBuildFromVMR=$from_vmr \
/p:Rebuild=$rebuild \ /p:Rebuild=$rebuild \
/p:Test=$test \ /p:Test=$test \
/p:Pack=$pack \ /p:Pack=$pack \
@ -240,7 +256,8 @@ function Build {
/p:PerformanceTest=$performance_test \ /p:PerformanceTest=$performance_test \
/p:Sign=$sign \ /p:Sign=$sign \
/p:Publish=$publish \ /p:Publish=$publish \
$properties /p:RestoreStaticGraphEnableBinaryLogger=$binary_log \
${properties[@]+"${properties[@]}"}
ExitWithExitCode 0 ExitWithExitCode 0
} }

50
eng/common/core-templates/job/job.yml

@ -19,10 +19,11 @@ parameters:
# publishing defaults # publishing defaults
artifacts: '' artifacts: ''
enableMicrobuild: false enableMicrobuild: false
enableMicrobuildForMacAndLinux: false
microbuildUseESRP: true
enablePublishBuildArtifacts: false enablePublishBuildArtifacts: false
enablePublishBuildAssets: false enablePublishBuildAssets: false
enablePublishTestResults: false enablePublishTestResults: false
enablePublishUsingPipelines: false
enableBuildRetry: false enableBuildRetry: false
mergeTestResults: false mergeTestResults: false
testRunTitle: '' testRunTitle: ''
@ -73,9 +74,6 @@ jobs:
- ${{ if ne(parameters.enableTelemetry, 'false') }}: - ${{ if ne(parameters.enableTelemetry, 'false') }}:
- name: DOTNET_CLI_TELEMETRY_PROFILE - name: DOTNET_CLI_TELEMETRY_PROFILE
value: '$(Build.Repository.Uri)' value: '$(Build.Repository.Uri)'
- ${{ if eq(parameters.enableRichCodeNavigation, 'true') }}:
- name: EnableRichCodeNavigation
value: 'true'
# Retry signature validation up to three times, waiting 2 seconds between attempts. # Retry signature validation up to three times, waiting 2 seconds between attempts.
# See https://learn.microsoft.com/en-us/nuget/reference/errors-and-warnings/nu3028#retry-untrusted-root-failures # See https://learn.microsoft.com/en-us/nuget/reference/errors-and-warnings/nu3028#retry-untrusted-root-failures
- name: NUGET_EXPERIMENTAL_CHAIN_BUILD_RETRY_POLICY - name: NUGET_EXPERIMENTAL_CHAIN_BUILD_RETRY_POLICY
@ -127,18 +125,12 @@ jobs:
- ${{ preStep }} - ${{ preStep }}
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- ${{ if eq(parameters.enableMicrobuild, 'true') }}: - template: /eng/common/core-templates/steps/install-microbuild.yml
- task: MicroBuildSigningPlugin@4 parameters:
displayName: Install MicroBuild plugin enableMicrobuild: ${{ parameters.enableMicrobuild }}
inputs: enableMicrobuildForMacAndLinux: ${{ parameters.enableMicrobuildForMacAndLinux }}
signType: $(_SignType) microbuildUseESRP: ${{ parameters.microbuildUseESRP }}
zipSources: false
feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json
env:
TeamName: $(_TeamName)
MicroBuildOutputFolderOverride: '$(Agent.TempDirectory)'
continueOnError: ${{ parameters.continueOnError }} continueOnError: ${{ parameters.continueOnError }}
condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
- ${{ if and(eq(parameters.runAsPublic, 'false'), eq(variables['System.TeamProject'], 'internal')) }}: - ${{ if and(eq(parameters.runAsPublic, 'false'), eq(variables['System.TeamProject'], 'internal')) }}:
- task: NuGetAuthenticate@1 - task: NuGetAuthenticate@1
@ -154,27 +146,15 @@ jobs:
- ${{ each step in parameters.steps }}: - ${{ each step in parameters.steps }}:
- ${{ step }} - ${{ step }}
- ${{ if eq(parameters.enableRichCodeNavigation, true) }}:
- task: RichCodeNavIndexer@0
displayName: RichCodeNav Upload
inputs:
languages: ${{ coalesce(parameters.richCodeNavigationLanguage, 'csharp') }}
environment: ${{ coalesce(parameters.richCodeNavigationEnvironment, 'internal') }}
richNavLogOutputDirectory: $(Build.SourcesDirectory)/artifacts/bin
uploadRichNavArtifacts: ${{ coalesce(parameters.richCodeNavigationUploadArtifacts, false) }}
continueOnError: true
- ${{ each step in parameters.componentGovernanceSteps }}: - ${{ each step in parameters.componentGovernanceSteps }}:
- ${{ step }} - ${{ step }}
- ${{ if eq(parameters.enableMicrobuild, 'true') }}: - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - template: /eng/common/core-templates/steps/cleanup-microbuild.yml
- task: MicroBuildCleanup@1 parameters:
displayName: Execute Microbuild cleanup tasks enableMicrobuild: ${{ parameters.enableMicrobuild }}
condition: and(always(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT')) enableMicrobuildForMacAndLinux: ${{ parameters.enableMicrobuildForMacAndLinux }}
continueOnError: ${{ parameters.continueOnError }} continueOnError: ${{ parameters.continueOnError }}
env:
TeamName: $(_TeamName)
# Publish test results # Publish test results
- ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'xunit')) }}: - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'xunit')) }}:
@ -183,7 +163,7 @@ jobs:
inputs: inputs:
testResultsFormat: 'xUnit' testResultsFormat: 'xUnit'
testResultsFiles: '*.xml' testResultsFiles: '*.xml'
searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)' searchFolder: '$(System.DefaultWorkingDirectory)/artifacts/TestResults/$(_BuildConfig)'
testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-xunit testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-xunit
mergeTestResults: ${{ parameters.mergeTestResults }} mergeTestResults: ${{ parameters.mergeTestResults }}
continueOnError: true continueOnError: true
@ -194,7 +174,7 @@ jobs:
inputs: inputs:
testResultsFormat: 'VSTest' testResultsFormat: 'VSTest'
testResultsFiles: '*.trx' testResultsFiles: '*.trx'
searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)' searchFolder: '$(System.DefaultWorkingDirectory)/artifacts/TestResults/$(_BuildConfig)'
testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-trx testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-trx
mergeTestResults: ${{ parameters.mergeTestResults }} mergeTestResults: ${{ parameters.mergeTestResults }}
continueOnError: true continueOnError: true
@ -238,7 +218,7 @@ jobs:
- task: CopyFiles@2 - task: CopyFiles@2
displayName: Gather buildconfiguration for build retry displayName: Gather buildconfiguration for build retry
inputs: inputs:
SourceFolder: '$(Build.SourcesDirectory)/eng/common/BuildConfiguration' SourceFolder: '$(System.DefaultWorkingDirectory)/eng/common/BuildConfiguration'
Contents: '**' Contents: '**'
TargetFolder: '$(Build.ArtifactStagingDirectory)/eng/common/BuildConfiguration' TargetFolder: '$(Build.ArtifactStagingDirectory)/eng/common/BuildConfiguration'
continueOnError: true continueOnError: true

33
eng/common/core-templates/job/onelocbuild.yml

@ -8,7 +8,7 @@ parameters:
CeapexPat: $(dn-bot-ceapex-package-r) # PAT for the loc AzDO instance https://dev.azure.com/ceapex CeapexPat: $(dn-bot-ceapex-package-r) # PAT for the loc AzDO instance https://dev.azure.com/ceapex
GithubPat: $(BotAccount-dotnet-bot-repo-PAT) GithubPat: $(BotAccount-dotnet-bot-repo-PAT)
SourcesDirectory: $(Build.SourcesDirectory) SourcesDirectory: $(System.DefaultWorkingDirectory)
CreatePr: true CreatePr: true
AutoCompletePr: false AutoCompletePr: false
ReusePr: true ReusePr: true
@ -68,7 +68,7 @@ jobs:
- ${{ if ne(parameters.SkipLocProjectJsonGeneration, 'true') }}: - ${{ if ne(parameters.SkipLocProjectJsonGeneration, 'true') }}:
- task: Powershell@2 - task: Powershell@2
inputs: inputs:
filePath: $(Build.SourcesDirectory)/eng/common/generate-locproject.ps1 filePath: $(System.DefaultWorkingDirectory)/eng/common/generate-locproject.ps1
arguments: $(_GenerateLocProjectArguments) arguments: $(_GenerateLocProjectArguments)
displayName: Generate LocProject.json displayName: Generate LocProject.json
condition: ${{ parameters.condition }} condition: ${{ parameters.condition }}
@ -86,8 +86,7 @@ jobs:
isAutoCompletePrSelected: ${{ parameters.AutoCompletePr }} isAutoCompletePrSelected: ${{ parameters.AutoCompletePr }}
${{ if eq(parameters.CreatePr, true) }}: ${{ if eq(parameters.CreatePr, true) }}:
isUseLfLineEndingsSelected: ${{ parameters.UseLfLineEndings }} isUseLfLineEndingsSelected: ${{ parameters.UseLfLineEndings }}
${{ if eq(parameters.RepoType, 'gitHub') }}: isShouldReusePrSelected: ${{ parameters.ReusePr }}
isShouldReusePrSelected: ${{ parameters.ReusePr }}
packageSourceAuth: patAuth packageSourceAuth: patAuth
patVariable: ${{ parameters.CeapexPat }} patVariable: ${{ parameters.CeapexPat }}
${{ if eq(parameters.RepoType, 'gitHub') }}: ${{ if eq(parameters.RepoType, 'gitHub') }}:
@ -100,22 +99,20 @@ jobs:
mirrorBranch: ${{ parameters.MirrorBranch }} mirrorBranch: ${{ parameters.MirrorBranch }}
condition: ${{ parameters.condition }} condition: ${{ parameters.condition }}
- template: /eng/common/core-templates/steps/publish-build-artifacts.yml # Copy the locProject.json to the root of the Loc directory, then publish a pipeline artifact
parameters: - task: CopyFiles@2
is1ESPipeline: ${{ parameters.is1ESPipeline }} displayName: Copy LocProject.json
args: inputs:
displayName: Publish Localization Files SourceFolder: '$(System.DefaultWorkingDirectory)/eng/Localize/'
pathToPublish: '$(Build.ArtifactStagingDirectory)/loc' Contents: 'LocProject.json'
publishLocation: Container TargetFolder: '$(Build.ArtifactStagingDirectory)/loc'
artifactName: Loc condition: ${{ parameters.condition }}
condition: ${{ parameters.condition }}
- template: /eng/common/core-templates/steps/publish-build-artifacts.yml - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
parameters: parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }} is1ESPipeline: ${{ parameters.is1ESPipeline }}
args: args:
displayName: Publish LocProject.json targetPath: '$(Build.ArtifactStagingDirectory)/loc'
pathToPublish: '$(Build.SourcesDirectory)/eng/Localize/' artifactName: 'Loc'
publishLocation: Container displayName: 'Publish Localization Files'
artifactName: Loc
condition: ${{ parameters.condition }} condition: ${{ parameters.condition }}

88
eng/common/core-templates/job/publish-build-assets.yml

@ -20,9 +20,6 @@ parameters:
# if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects. # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
runAsPublic: false runAsPublic: false
# Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing
publishUsingPipelines: false
# Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing
publishAssetsImmediately: false publishAssetsImmediately: false
@ -32,6 +29,19 @@ parameters:
is1ESPipeline: '' is1ESPipeline: ''
# Optional: 🌤️ or not the build has assets it wants to publish to BAR
isAssetlessBuild: false
# Optional, publishing version
publishingVersion: 3
# Optional: A minimatch pattern for the asset manifests to publish to BAR
assetManifestsPattern: '*/manifests/**/*.xml'
repositoryAlias: self
officialBuildId: ''
jobs: jobs:
- job: Asset_Registry_Publish - job: Asset_Registry_Publish
@ -54,6 +64,11 @@ jobs:
value: false value: false
# unconditional - needed for logs publishing (redactor tool version) # unconditional - needed for logs publishing (redactor tool version)
- template: /eng/common/core-templates/post-build/common-variables.yml - template: /eng/common/core-templates/post-build/common-variables.yml
- name: OfficialBuildId
${{ if ne(parameters.officialBuildId, '') }}:
value: ${{ parameters.officialBuildId }}
${{ else }}:
value: $(Build.BuildNumber)
pool: pool:
# We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com) # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
@ -72,18 +87,36 @@ jobs:
- 'Illegal entry point, is1ESPipeline is not defined. Repository yaml should not directly reference templates in core-templates folder.': error - 'Illegal entry point, is1ESPipeline is not defined. Repository yaml should not directly reference templates in core-templates folder.': error
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- checkout: self - checkout: ${{ parameters.repositoryAlias }}
fetchDepth: 3 fetchDepth: 3
clean: true clean: true
- task: DownloadBuildArtifacts@0 - ${{ if eq(parameters.isAssetlessBuild, 'false') }}:
displayName: Download artifact - ${{ if eq(parameters.publishingVersion, 3) }}:
inputs: - task: DownloadPipelineArtifact@2
artifactName: AssetManifests displayName: Download Asset Manifests
downloadPath: '$(Build.StagingDirectory)/Download' inputs:
checkDownloadedFiles: true artifactName: AssetManifests
condition: ${{ parameters.condition }} targetPath: '$(Build.StagingDirectory)/AssetManifests'
continueOnError: ${{ parameters.continueOnError }} condition: ${{ parameters.condition }}
continueOnError: ${{ parameters.continueOnError }}
- ${{ if eq(parameters.publishingVersion, 4) }}:
- task: DownloadPipelineArtifact@2
displayName: Download V4 asset manifests
inputs:
itemPattern: '*/manifests/**/*.xml'
targetPath: '$(Build.StagingDirectory)/AllAssetManifests'
condition: ${{ parameters.condition }}
continueOnError: ${{ parameters.continueOnError }}
- task: CopyFiles@2
displayName: Copy V4 asset manifests to AssetManifests
inputs:
SourceFolder: '$(Build.StagingDirectory)/AllAssetManifests'
Contents: ${{ parameters.assetManifestsPattern }}
TargetFolder: '$(Build.StagingDirectory)/AssetManifests'
flattenFolders: true
condition: ${{ parameters.condition }}
continueOnError: ${{ parameters.continueOnError }}
- task: NuGetAuthenticate@1 - task: NuGetAuthenticate@1
@ -93,12 +126,12 @@ jobs:
azureSubscription: "Darc: Maestro Production" azureSubscription: "Darc: Maestro Production"
scriptType: ps scriptType: ps
scriptLocation: scriptPath scriptLocation: scriptPath
scriptPath: $(Build.SourcesDirectory)/eng/common/sdk-task.ps1 scriptPath: $(System.DefaultWorkingDirectory)/eng/common/sdk-task.ps1
arguments: -task PublishBuildAssets -restore -msbuildEngine dotnet arguments: -task PublishBuildAssets -restore -msbuildEngine dotnet
/p:ManifestsPath='$(Build.StagingDirectory)/Download/AssetManifests' /p:ManifestsPath='$(Build.StagingDirectory)/AssetManifests'
/p:IsAssetlessBuild=${{ parameters.isAssetlessBuild }}
/p:MaestroApiEndpoint=https://maestro.dot.net /p:MaestroApiEndpoint=https://maestro.dot.net
/p:PublishUsingPipelines=${{ parameters.publishUsingPipelines }} /p:OfficialBuildId=$(OfficialBuildId)
/p:OfficialBuildId=$(Build.BuildNumber)
condition: ${{ parameters.condition }} condition: ${{ parameters.condition }}
continueOnError: ${{ parameters.continueOnError }} continueOnError: ${{ parameters.continueOnError }}
@ -113,13 +146,24 @@ jobs:
Add-Content -Path $filePath -Value "$(DefaultChannels)" Add-Content -Path $filePath -Value "$(DefaultChannels)"
Add-Content -Path $filePath -Value $(IsStableBuild) Add-Content -Path $filePath -Value $(IsStableBuild)
$symbolExclusionfile = "$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt" $symbolExclusionfile = "$(System.DefaultWorkingDirectory)/eng/SymbolPublishingExclusionsFile.txt"
if (Test-Path -Path $symbolExclusionfile) if (Test-Path -Path $symbolExclusionfile)
{ {
Write-Host "SymbolExclusionFile exists" Write-Host "SymbolExclusionFile exists"
Copy-Item -Path $symbolExclusionfile -Destination "$(Build.StagingDirectory)/ReleaseConfigs" Copy-Item -Path $symbolExclusionfile -Destination "$(Build.StagingDirectory)/ReleaseConfigs"
} }
- ${{ if eq(parameters.publishingVersion, 4) }}:
- template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
args:
targetPath: '$(Build.ArtifactStagingDirectory)/MergedManifest.xml'
artifactName: AssetManifests
displayName: 'Publish Merged Manifest'
retryCountOnTaskFailure: 10 # for any logs being locked
sbomEnabled: false # we don't need SBOM for logs
- template: /eng/common/core-templates/steps/publish-build-artifacts.yml - template: /eng/common/core-templates/steps/publish-build-artifacts.yml
parameters: parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }} is1ESPipeline: ${{ parameters.is1ESPipeline }}
@ -129,20 +173,25 @@ jobs:
publishLocation: Container publishLocation: Container
artifactName: ReleaseConfigs artifactName: ReleaseConfigs
- ${{ if eq(parameters.publishAssetsImmediately, 'true') }}: - ${{ if or(eq(parameters.publishAssetsImmediately, 'true'), eq(parameters.isAssetlessBuild, 'true')) }}:
- template: /eng/common/core-templates/post-build/setup-maestro-vars.yml - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
parameters: parameters:
BARBuildId: ${{ parameters.BARBuildId }} BARBuildId: ${{ parameters.BARBuildId }}
PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
is1ESPipeline: ${{ parameters.is1ESPipeline }} is1ESPipeline: ${{ parameters.is1ESPipeline }}
# Darc is targeting 8.0, so make sure it's installed
- task: UseDotNet@2
inputs:
version: 8.0.x
- task: AzureCLI@2 - task: AzureCLI@2
displayName: Publish Using Darc displayName: Publish Using Darc
inputs: inputs:
azureSubscription: "Darc: Maestro Production" azureSubscription: "Darc: Maestro Production"
scriptType: ps scriptType: ps
scriptLocation: scriptPath scriptLocation: scriptPath
scriptPath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1 scriptPath: $(System.DefaultWorkingDirectory)/eng/common/post-build/publish-using-darc.ps1
arguments: > arguments: >
-BuildId $(BARBuildId) -BuildId $(BARBuildId)
-PublishingInfraVersion 3 -PublishingInfraVersion 3
@ -150,6 +199,7 @@ jobs:
-WaitPublishingFinish true -WaitPublishingFinish true
-ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}' -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}'
-SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}' -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}'
-SkipAssetsPublishing '${{ parameters.isAssetlessBuild }}'
- ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}: - ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}:
- template: /eng/common/core-templates/steps/publish-logs.yml - template: /eng/common/core-templates/steps/publish-logs.yml

7
eng/common/core-templates/job/source-build.yml

@ -12,9 +12,10 @@ parameters:
# The name of the job. This is included in the job ID. # The name of the job. This is included in the job ID.
# targetRID: '' # targetRID: ''
# The name of the target RID to use, instead of the one auto-detected by Arcade. # The name of the target RID to use, instead of the one auto-detected by Arcade.
# nonPortable: false # portableBuild: false
# Enables non-portable mode. This means a more specific RID (e.g. fedora.32-x64 rather than # Enables non-portable mode. This means a more specific RID (e.g. fedora.32-x64 rather than
# linux-x64), and compiling against distro-provided packages rather than portable ones. # linux-x64), and compiling against distro-provided packages rather than portable ones. The
# default is portable mode.
# skipPublishValidation: false # skipPublishValidation: false
# Disables publishing validation. By default, a check is performed to ensure no packages are # Disables publishing validation. By default, a check is performed to ensure no packages are
# published by source-build. # published by source-build.
@ -26,6 +27,8 @@ parameters:
# Specifies the build script to invoke to perform the build in the repo. The default # Specifies the build script to invoke to perform the build in the repo. The default
# './build.sh' should work for typical Arcade repositories, but this is customizable for # './build.sh' should work for typical Arcade repositories, but this is customizable for
# difficult situations. # difficult situations.
# buildArguments: ''
# Specifies additional build arguments to pass to the build script.
# jobProperties: {} # jobProperties: {}
# A list of job properties to inject at the top level, for potential extensibility beyond # A list of job properties to inject at the top level, for potential extensibility beyond
# container and pool. # container and pool.

47
eng/common/core-templates/job/source-index-stage1.yml

@ -1,8 +1,5 @@
parameters: parameters:
runAsPublic: false runAsPublic: false
sourceIndexUploadPackageVersion: 2.0.0-20240522.1
sourceIndexProcessBinlogPackageVersion: 1.0.1-20240522.1
sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json
sourceIndexBuildCommand: powershell -NoLogo -NoProfile -ExecutionPolicy Bypass -Command "eng/common/build.ps1 -restore -build -binarylog -ci" sourceIndexBuildCommand: powershell -NoLogo -NoProfile -ExecutionPolicy Bypass -Command "eng/common/build.ps1 -restore -build -binarylog -ci"
preSteps: [] preSteps: []
binlogPath: artifacts/log/Debug/Build.binlog binlogPath: artifacts/log/Debug/Build.binlog
@ -16,12 +13,6 @@ jobs:
dependsOn: ${{ parameters.dependsOn }} dependsOn: ${{ parameters.dependsOn }}
condition: ${{ parameters.condition }} condition: ${{ parameters.condition }}
variables: variables:
- name: SourceIndexUploadPackageVersion
value: ${{ parameters.sourceIndexUploadPackageVersion }}
- name: SourceIndexProcessBinlogPackageVersion
value: ${{ parameters.sourceIndexProcessBinlogPackageVersion }}
- name: SourceIndexPackageSource
value: ${{ parameters.sourceIndexPackageSource }}
- name: BinlogPath - name: BinlogPath
value: ${{ parameters.binlogPath }} value: ${{ parameters.binlogPath }}
- template: /eng/common/core-templates/variables/pool-providers.yml - template: /eng/common/core-templates/variables/pool-providers.yml
@ -34,12 +25,10 @@ jobs:
pool: pool:
${{ if eq(variables['System.TeamProject'], 'public') }}: ${{ if eq(variables['System.TeamProject'], 'public') }}:
name: $(DncEngPublicBuildPool) name: $(DncEngPublicBuildPool)
image: 1es-windows-2022-open image: windows.vs2022.amd64.open
os: windows
${{ if eq(variables['System.TeamProject'], 'internal') }}: ${{ if eq(variables['System.TeamProject'], 'internal') }}:
name: $(DncEngInternalBuildPool) name: $(DncEngInternalBuildPool)
image: 1es-windows-2022 image: windows.vs2022.amd64
os: windows
steps: steps:
- ${{ if eq(parameters.is1ESPipeline, '') }}: - ${{ if eq(parameters.is1ESPipeline, '') }}:
@ -47,35 +36,9 @@ jobs:
- ${{ each preStep in parameters.preSteps }}: - ${{ each preStep in parameters.preSteps }}:
- ${{ preStep }} - ${{ preStep }}
- task: UseDotNet@2
displayName: Use .NET 8 SDK
inputs:
packageType: sdk
version: 8.0.x
installationPath: $(Agent.TempDirectory)/dotnet
workingDirectory: $(Agent.TempDirectory)
- script: |
$(Agent.TempDirectory)/dotnet/dotnet tool install BinLogToSln --version $(sourceIndexProcessBinlogPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools
$(Agent.TempDirectory)/dotnet/dotnet tool install UploadIndexStage1 --version $(sourceIndexUploadPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools
displayName: Download Tools
# Set working directory to temp directory so 'dotnet' doesn't try to use global.json and use the repo's sdk.
workingDirectory: $(Agent.TempDirectory)
- script: ${{ parameters.sourceIndexBuildCommand }} - script: ${{ parameters.sourceIndexBuildCommand }}
displayName: Build Repository displayName: Build Repository
- script: $(Agent.TempDirectory)/.source-index/tools/BinLogToSln -i $(BinlogPath) -r $(Build.SourcesDirectory) -n $(Build.Repository.Name) -o .source-index/stage1output - template: /eng/common/core-templates/steps/source-index-stage1-publish.yml
displayName: Process Binlog into indexable sln parameters:
binLogPath: ${{ parameters.binLogPath }}
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- task: AzureCLI@2
displayName: Log in to Azure and upload stage1 artifacts to source index
inputs:
azureSubscription: 'SourceDotNet Stage1 Publish'
addSpnToEnvironment: true
scriptType: 'ps'
scriptLocation: 'inlineScript'
inlineScript: |
$(Agent.TempDirectory)/.source-index/tools/UploadIndexStage1 -i .source-index/stage1output -n $(Build.Repository.Name) -s netsourceindexstage1 -b stage1

3
eng/common/core-templates/jobs/codeql-build.yml

@ -15,7 +15,6 @@ jobs:
enablePublishBuildArtifacts: false enablePublishBuildArtifacts: false
enablePublishTestResults: false enablePublishTestResults: false
enablePublishBuildAssets: false enablePublishBuildAssets: false
enablePublishUsingPipelines: false
enableTelemetry: true enableTelemetry: true
variables: variables:
@ -25,7 +24,7 @@ jobs:
- name: DefaultGuardianVersion - name: DefaultGuardianVersion
value: 0.109.0 value: 0.109.0
- name: GuardianPackagesConfigFile - name: GuardianPackagesConfigFile
value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config value: $(System.DefaultWorkingDirectory)\eng\common\sdl\packages.config
- name: GuardianVersion - name: GuardianVersion
value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }} value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }}

19
eng/common/core-templates/jobs/jobs.yml

@ -5,9 +5,6 @@ parameters:
# Optional: Include PublishBuildArtifacts task # Optional: Include PublishBuildArtifacts task
enablePublishBuildArtifacts: false enablePublishBuildArtifacts: false
# Optional: Enable publishing using release pipelines
enablePublishUsingPipelines: false
# Optional: Enable running the source-build jobs to build repo from source # Optional: Enable running the source-build jobs to build repo from source
enableSourceBuild: false enableSourceBuild: false
@ -30,6 +27,9 @@ parameters:
# Optional: Publish the assets as soon as the publish to BAR stage is complete, rather doing so in a separate stage. # Optional: Publish the assets as soon as the publish to BAR stage is complete, rather doing so in a separate stage.
publishAssetsImmediately: false publishAssetsImmediately: false
# Optional: 🌤️ or not the build has assets it wants to publish to BAR
isAssetlessBuild: false
# Optional: If using publishAssetsImmediately and additional parameters are needed, can be used to send along additional parameters (normally sent to post-build.yml) # Optional: If using publishAssetsImmediately and additional parameters are needed, can be used to send along additional parameters (normally sent to post-build.yml)
artifactsPublishingAdditionalParameters: '' artifactsPublishingAdditionalParameters: ''
signingValidationAdditionalParameters: '' signingValidationAdditionalParameters: ''
@ -43,6 +43,8 @@ parameters:
artifacts: {} artifacts: {}
is1ESPipeline: '' is1ESPipeline: ''
repositoryAlias: self
officialBuildId: ''
# Internal resources (telemetry, microbuild) can only be accessed from non-public projects, # Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
# and some (Microbuild) should only be applied to non-PR cases for internal builds. # and some (Microbuild) should only be applied to non-PR cases for internal builds.
@ -83,7 +85,6 @@ jobs:
- template: /eng/common/core-templates/jobs/source-build.yml - template: /eng/common/core-templates/jobs/source-build.yml
parameters: parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }} is1ESPipeline: ${{ parameters.is1ESPipeline }}
allCompletedJobId: Source_Build_Complete
${{ each parameter in parameters.sourceBuildParameters }}: ${{ each parameter in parameters.sourceBuildParameters }}:
${{ parameter.key }}: ${{ parameter.value }} ${{ parameter.key }}: ${{ parameter.value }}
@ -96,7 +97,7 @@ jobs:
${{ parameter.key }}: ${{ parameter.value }} ${{ parameter.key }}: ${{ parameter.value }}
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- ${{ if or(eq(parameters.enablePublishBuildAssets, true), eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, '')) }}: - ${{ if or(eq(parameters.enablePublishBuildAssets, true), eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, ''), eq(parameters.isAssetlessBuild, true)) }}:
- template: ../job/publish-build-assets.yml - template: ../job/publish-build-assets.yml
parameters: parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }} is1ESPipeline: ${{ parameters.is1ESPipeline }}
@ -108,12 +109,12 @@ jobs:
- ${{ if eq(parameters.publishBuildAssetsDependsOn, '') }}: - ${{ if eq(parameters.publishBuildAssetsDependsOn, '') }}:
- ${{ each job in parameters.jobs }}: - ${{ each job in parameters.jobs }}:
- ${{ job.job }} - ${{ job.job }}
- ${{ if eq(parameters.enableSourceBuild, true) }}:
- Source_Build_Complete
runAsPublic: ${{ parameters.runAsPublic }} runAsPublic: ${{ parameters.runAsPublic }}
publishUsingPipelines: ${{ parameters.enablePublishUsingPipelines }} publishAssetsImmediately: ${{ or(parameters.publishAssetsImmediately, parameters.isAssetlessBuild) }}
publishAssetsImmediately: ${{ parameters.publishAssetsImmediately }} isAssetlessBuild: ${{ parameters.isAssetlessBuild }}
enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }} enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }}
artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
signingValidationAdditionalParameters: ${{ parameters.signingValidationAdditionalParameters }} signingValidationAdditionalParameters: ${{ parameters.signingValidationAdditionalParameters }}
repositoryAlias: ${{ parameters.repositoryAlias }}
officialBuildId: ${{ parameters.officialBuildId }}

18
eng/common/core-templates/jobs/source-build.yml

@ -2,19 +2,13 @@ parameters:
# This template adds arcade-powered source-build to CI. A job is created for each platform, as # This template adds arcade-powered source-build to CI. A job is created for each platform, as
# well as an optional server job that completes when all platform jobs complete. # well as an optional server job that completes when all platform jobs complete.
# The name of the "join" job for all source-build platforms. If set to empty string, the job is
# not included. Existing repo pipelines can use this job depend on all source-build jobs
# completing without maintaining a separate list of every single job ID: just depend on this one
# server job. By default, not included. Recommended name if used: 'Source_Build_Complete'.
allCompletedJobId: ''
# See /eng/common/core-templates/job/source-build.yml # See /eng/common/core-templates/job/source-build.yml
jobNamePrefix: 'Source_Build' jobNamePrefix: 'Source_Build'
# This is the default platform provided by Arcade, intended for use by a managed-only repo. # This is the default platform provided by Arcade, intended for use by a managed-only repo.
defaultManagedPlatform: defaultManagedPlatform:
name: 'Managed' name: 'Managed'
container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream9' container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream-10-amd64'
# Defines the platforms on which to run build jobs. One job is created for each platform, and the # Defines the platforms on which to run build jobs. One job is created for each platform, and the
# object in this array is sent to the job template as 'platform'. If no platforms are specified, # object in this array is sent to the job template as 'platform'. If no platforms are specified,
@ -31,16 +25,6 @@ parameters:
jobs: jobs:
- ${{ if ne(parameters.allCompletedJobId, '') }}:
- job: ${{ parameters.allCompletedJobId }}
displayName: Source-Build Complete
pool: server
dependsOn:
- ${{ each platform in parameters.platforms }}:
- ${{ parameters.jobNamePrefix }}_${{ platform.name }}
- ${{ if eq(length(parameters.platforms), 0) }}:
- ${{ parameters.jobNamePrefix }}_${{ parameters.defaultManagedPlatform.name }}
- ${{ each platform in parameters.platforms }}: - ${{ each platform in parameters.platforms }}:
- template: /eng/common/core-templates/job/source-build.yml - template: /eng/common/core-templates/job/source-build.yml
parameters: parameters:

28
eng/common/core-templates/post-build/post-build.yml

@ -45,6 +45,11 @@ parameters:
type: boolean type: boolean
default: true default: true
- name: requireDefaultChannels
displayName: Fail the build if there are no default channel(s) registrations for the current build
type: boolean
default: false
- name: SDLValidationParameters - name: SDLValidationParameters
type: object type: object
default: default:
@ -55,6 +60,11 @@ parameters:
artifactNames: '' artifactNames: ''
downloadArtifacts: true downloadArtifacts: true
- name: isAssetlessBuild
type: boolean
displayName: Is Assetless Build
default: false
# These parameters let the user customize the call to sdk-task.ps1 for publishing # These parameters let the user customize the call to sdk-task.ps1 for publishing
# symbols & general artifacts as well as for signing validation # symbols & general artifacts as well as for signing validation
- name: symbolPublishingAdditionalParameters - name: symbolPublishingAdditionalParameters
@ -144,7 +154,7 @@ stages:
- task: PowerShell@2 - task: PowerShell@2
displayName: Validate displayName: Validate
inputs: inputs:
filePath: $(Build.SourcesDirectory)/eng/common/post-build/nuget-validation.ps1 filePath: $(System.DefaultWorkingDirectory)/eng/common/post-build/nuget-validation.ps1
arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/ arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/
- job: - job:
@ -183,9 +193,6 @@ stages:
buildId: $(AzDOBuildId) buildId: $(AzDOBuildId)
artifactName: PackageArtifacts artifactName: PackageArtifacts
checkDownloadedFiles: true checkDownloadedFiles: true
itemPattern: |
**
!**/Microsoft.SourceBuild.Intermediate.*.nupkg
# This is necessary whenever we want to publish/restore to an AzDO private feed # This is necessary whenever we want to publish/restore to an AzDO private feed
# Since sdk-task.ps1 tries to restore packages we need to do this authentication here # Since sdk-task.ps1 tries to restore packages we need to do this authentication here
@ -201,7 +208,7 @@ stages:
filePath: eng\common\sdk-task.ps1 filePath: eng\common\sdk-task.ps1
arguments: -task SigningValidation -restore -msbuildEngine vs arguments: -task SigningValidation -restore -msbuildEngine vs
/p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts' /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts'
/p:SignCheckExclusionsFile='$(Build.SourcesDirectory)/eng/SignCheckExclusionsFile.txt' /p:SignCheckExclusionsFile='$(System.DefaultWorkingDirectory)/eng/SignCheckExclusionsFile.txt'
${{ parameters.signingValidationAdditionalParameters }} ${{ parameters.signingValidationAdditionalParameters }}
- template: /eng/common/core-templates/steps/publish-logs.yml - template: /eng/common/core-templates/steps/publish-logs.yml
@ -251,7 +258,7 @@ stages:
- task: PowerShell@2 - task: PowerShell@2
displayName: Validate displayName: Validate
inputs: inputs:
filePath: $(Build.SourcesDirectory)/eng/common/post-build/sourcelink-validation.ps1 filePath: $(System.DefaultWorkingDirectory)/eng/common/post-build/sourcelink-validation.ps1
arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/ arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/
-ExtractPath $(Agent.BuildDirectory)/Extract/ -ExtractPath $(Agent.BuildDirectory)/Extract/
-GHRepoName $(Build.Repository.Name) -GHRepoName $(Build.Repository.Name)
@ -300,17 +307,24 @@ stages:
- task: NuGetAuthenticate@1 - task: NuGetAuthenticate@1
# Darc is targeting 8.0, so make sure it's installed
- task: UseDotNet@2
inputs:
version: 8.0.x
- task: AzureCLI@2 - task: AzureCLI@2
displayName: Publish Using Darc displayName: Publish Using Darc
inputs: inputs:
azureSubscription: "Darc: Maestro Production" azureSubscription: "Darc: Maestro Production"
scriptType: ps scriptType: ps
scriptLocation: scriptPath scriptLocation: scriptPath
scriptPath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1 scriptPath: $(System.DefaultWorkingDirectory)/eng/common/post-build/publish-using-darc.ps1
arguments: > arguments: >
-BuildId $(BARBuildId) -BuildId $(BARBuildId)
-PublishingInfraVersion ${{ parameters.publishingInfraVersion }} -PublishingInfraVersion ${{ parameters.publishingInfraVersion }}
-AzdoToken '$(System.AccessToken)' -AzdoToken '$(System.AccessToken)'
-WaitPublishingFinish true -WaitPublishingFinish true
-RequireDefaultChannels ${{ parameters.requireDefaultChannels }}
-ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}' -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}'
-SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}' -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}'
-SkipAssetsPublishing '${{ parameters.isAssetlessBuild }}'

2
eng/common/core-templates/post-build/setup-maestro-vars.yml

@ -36,7 +36,7 @@ steps:
$AzureDevOpsBuildId = $Env:Build_BuildId $AzureDevOpsBuildId = $Env:Build_BuildId
} }
else { else {
. $(Build.SourcesDirectory)\eng\common\tools.ps1 . $(System.DefaultWorkingDirectory)\eng\common\tools.ps1
$darc = Get-Darc $darc = Get-Darc
$buildInfo = & $darc get-build ` $buildInfo = & $darc get-build `
--id ${{ parameters.BARBuildId }} ` --id ${{ parameters.BARBuildId }} `

28
eng/common/core-templates/steps/cleanup-microbuild.yml

@ -0,0 +1,28 @@
parameters:
# Enable cleanup tasks for MicroBuild
enableMicrobuild: false
# Enable cleanup tasks for MicroBuild on Mac and Linux
# Will be ignored if 'enableMicrobuild' is false or 'Agent.Os' is 'Windows_NT'
enableMicrobuildForMacAndLinux: false
continueOnError: false
steps:
- ${{ if eq(parameters.enableMicrobuild, 'true') }}:
- task: MicroBuildCleanup@1
displayName: Execute Microbuild cleanup tasks
condition: and(
always(),
or(
and(
eq(variables['Agent.Os'], 'Windows_NT'),
in(variables['_SignType'], 'real', 'test')
),
and(
${{ eq(parameters.enableMicrobuildForMacAndLinux, true) }},
ne(variables['Agent.Os'], 'Windows_NT'),
eq(variables['_SignType'], 'real')
)
))
continueOnError: ${{ parameters.continueOnError }}
env:
TeamName: $(_TeamName)

12
eng/common/core-templates/steps/enable-internal-sources.yml

@ -17,8 +17,8 @@ steps:
- task: PowerShell@2 - task: PowerShell@2
displayName: Setup Internal Feeds displayName: Setup Internal Feeds
inputs: inputs:
filePath: $(Build.SourcesDirectory)/eng/common/SetupNugetSources.ps1 filePath: $(System.DefaultWorkingDirectory)/eng/common/SetupNugetSources.ps1
arguments: -ConfigFile $(Build.SourcesDirectory)/NuGet.config -Password $Env:Token arguments: -ConfigFile $(System.DefaultWorkingDirectory)/NuGet.config -Password $Env:Token
env: env:
Token: ${{ parameters.legacyCredential }} Token: ${{ parameters.legacyCredential }}
# If running on dnceng (internal project), just use the default behavior for NuGetAuthenticate. # If running on dnceng (internal project), just use the default behavior for NuGetAuthenticate.
@ -29,8 +29,8 @@ steps:
- task: PowerShell@2 - task: PowerShell@2
displayName: Setup Internal Feeds displayName: Setup Internal Feeds
inputs: inputs:
filePath: $(Build.SourcesDirectory)/eng/common/SetupNugetSources.ps1 filePath: $(System.DefaultWorkingDirectory)/eng/common/SetupNugetSources.ps1
arguments: -ConfigFile $(Build.SourcesDirectory)/NuGet.config arguments: -ConfigFile $(System.DefaultWorkingDirectory)/NuGet.config
- ${{ else }}: - ${{ else }}:
- template: /eng/common/templates/steps/get-federated-access-token.yml - template: /eng/common/templates/steps/get-federated-access-token.yml
parameters: parameters:
@ -39,8 +39,8 @@ steps:
- task: PowerShell@2 - task: PowerShell@2
displayName: Setup Internal Feeds displayName: Setup Internal Feeds
inputs: inputs:
filePath: $(Build.SourcesDirectory)/eng/common/SetupNugetSources.ps1 filePath: $(System.DefaultWorkingDirectory)/eng/common/SetupNugetSources.ps1
arguments: -ConfigFile $(Build.SourcesDirectory)/NuGet.config -Password $(dnceng-artifacts-feeds-read-access-token) arguments: -ConfigFile $(System.DefaultWorkingDirectory)/NuGet.config -Password $(dnceng-artifacts-feeds-read-access-token)
# This is required in certain scenarios to install the ADO credential provider. # This is required in certain scenarios to install the ADO credential provider.
# It installed by default in some msbuild invocations (e.g. VS msbuild), but needs to be installed for others # It installed by default in some msbuild invocations (e.g. VS msbuild), but needs to be installed for others
# (e.g. dotnet msbuild). # (e.g. dotnet msbuild).

6
eng/common/core-templates/steps/generate-sbom.yml

@ -5,8 +5,8 @@
# IgnoreDirectories - Directories to ignore for SBOM generation. This will be passed through to the CG component detector. # IgnoreDirectories - Directories to ignore for SBOM generation. This will be passed through to the CG component detector.
parameters: parameters:
PackageVersion: 9.0.0 PackageVersion: 10.0.0
BuildDropPath: '$(Build.SourcesDirectory)/artifacts' BuildDropPath: '$(System.DefaultWorkingDirectory)/artifacts'
PackageName: '.NET' PackageName: '.NET'
ManifestDirPath: $(Build.ArtifactStagingDirectory)/sbom ManifestDirPath: $(Build.ArtifactStagingDirectory)/sbom
IgnoreDirectories: '' IgnoreDirectories: ''
@ -38,7 +38,7 @@ steps:
PackageName: ${{ parameters.packageName }} PackageName: ${{ parameters.packageName }}
BuildDropPath: ${{ parameters.buildDropPath }} BuildDropPath: ${{ parameters.buildDropPath }}
PackageVersion: ${{ parameters.packageVersion }} PackageVersion: ${{ parameters.packageVersion }}
ManifestDirPath: ${{ parameters.manifestDirPath }} ManifestDirPath: ${{ parameters.manifestDirPath }}/$(ARTIFACT_NAME)
${{ if ne(parameters.IgnoreDirectories, '') }}: ${{ if ne(parameters.IgnoreDirectories, '') }}:
AdditionalComponentDetectorArgs: '--IgnoreDirectories ${{ parameters.IgnoreDirectories }}' AdditionalComponentDetectorArgs: '--IgnoreDirectories ${{ parameters.IgnoreDirectories }}'

11
eng/common/core-templates/steps/get-delegation-sas.yml

@ -31,16 +31,7 @@ steps:
# Calculate the expiration of the SAS token and convert to UTC # Calculate the expiration of the SAS token and convert to UTC
$expiry = (Get-Date).AddHours(${{ parameters.expiryInHours }}).ToUniversalTime().ToString("yyyy-MM-ddTHH:mm:ssZ") $expiry = (Get-Date).AddHours(${{ parameters.expiryInHours }}).ToUniversalTime().ToString("yyyy-MM-ddTHH:mm:ssZ")
# Temporarily work around a helix issue where SAS tokens with / in them will cause incorrect downloads $sas = az storage container generate-sas --account-name ${{ parameters.storageAccount }} --name ${{ parameters.container }} --permissions ${{ parameters.permissions }} --expiry $expiry --auth-mode login --as-user -o tsv
# of correlation payloads. https://github.com/dotnet/dnceng/issues/3484
$sas = ""
do {
$sas = az storage container generate-sas --account-name ${{ parameters.storageAccount }} --name ${{ parameters.container }} --permissions ${{ parameters.permissions }} --expiry $expiry --auth-mode login --as-user -o tsv
if ($LASTEXITCODE -ne 0) {
Write-Error "Failed to generate SAS token."
exit 1
}
} while($sas.IndexOf('/') -ne -1)
if ($LASTEXITCODE -ne 0) { if ($LASTEXITCODE -ne 0) {
Write-Error "Failed to generate SAS token." Write-Error "Failed to generate SAS token."

90
eng/common/core-templates/steps/install-microbuild.yml

@ -0,0 +1,90 @@
parameters:
# Enable install tasks for MicroBuild
enableMicrobuild: false
# Enable install tasks for MicroBuild on Mac and Linux
# Will be ignored if 'enableMicrobuild' is false or 'Agent.Os' is 'Windows_NT'
enableMicrobuildForMacAndLinux: false
# Determines whether the ESRP service connection information should be passed to the signing plugin.
# This overlaps with _SignType to some degree. We only need the service connection for real signing.
# It's important that the service connection not be passed to the MicroBuildSigningPlugin task in this place.
# Doing so will cause the service connection to be authorized for the pipeline, which isn't allowed and won't work for non-prod.
# Unfortunately, _SignType can't be used to exclude the use of the service connection in non-real sign scenarios. The
# variable is not available in template expression. _SignType has a very large proliferation across .NET, so replacing it is tough.
microbuildUseESRP: true
continueOnError: false
steps:
- ${{ if eq(parameters.enableMicrobuild, 'true') }}:
- ${{ if eq(parameters.enableMicrobuildForMacAndLinux, 'true') }}:
# Installing .NET 8 is required to use the MicroBuild signing plugin on non-Windows platforms
- task: UseDotNet@2
displayName: Install .NET 8.0 SDK for MicroBuild Plugin
inputs:
packageType: sdk
version: 8.0.x
# Installing the SDK in a '.dotnet-microbuild' directory is required for signing.
# See target FindDotNetPathForMicroBuild in arcade/src/Microsoft.DotNet.Arcade.Sdk/tools/Sign.proj
# Do not remove '.dotnet-microbuild' from the path without changing the corresponding logic.
installationPath: $(Agent.TempDirectory)/.dotnet-microbuild
condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT'))
- script: |
REM Check if ESRP is disabled while SignType is real
if /I "${{ parameters.microbuildUseESRP }}"=="false" if /I "$(_SignType)"=="real" (
echo Error: ESRP must be enabled when SignType is real.
exit /b 1
)
displayName: 'Validate ESRP usage (Windows)'
condition: and(succeeded(), eq(variables['Agent.Os'], 'Windows_NT'))
- script: |
# Check if ESRP is disabled while SignType is real
if [ "${{ parameters.microbuildUseESRP }}" = "false" ] && [ "$(_SignType)" = "real" ]; then
echo "Error: ESRP must be enabled when SignType is real."
exit 1
fi
displayName: 'Validate ESRP usage (Non-Windows)'
condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT'))
# Two different MB install steps. This is due to not being able to use the agent OS during
# YAML expansion, and Windows vs. Linux/Mac uses different service connections. However,
# we can avoid including the MB install step if not enabled at all. This avoids a bunch of
# extra pipeline authorizations, since most pipelines do not sign on non-Windows.
- task: MicroBuildSigningPlugin@4
displayName: Install MicroBuild plugin (Windows)
inputs:
signType: $(_SignType)
zipSources: false
feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json
${{ if eq(parameters.microbuildUseESRP, true) }}:
ConnectedServiceName: 'MicroBuild Signing Task (DevDiv)'
${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
ConnectedPMEServiceName: 6cc74545-d7b9-4050-9dfa-ebefcc8961ea
${{ else }}:
ConnectedPMEServiceName: 248d384a-b39b-46e3-8ad5-c2c210d5e7ca
env:
TeamName: $(_TeamName)
MicroBuildOutputFolderOverride: $(Agent.TempDirectory)/MicroBuild
SYSTEM_ACCESSTOKEN: $(System.AccessToken)
continueOnError: ${{ parameters.continueOnError }}
condition: and(succeeded(), eq(variables['Agent.Os'], 'Windows_NT'), in(variables['_SignType'], 'real', 'test'))
- ${{ if eq(parameters.enableMicrobuildForMacAndLinux, true) }}:
- task: MicroBuildSigningPlugin@4
displayName: Install MicroBuild plugin (non-Windows)
inputs:
signType: $(_SignType)
zipSources: false
feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json
${{ if eq(parameters.microbuildUseESRP, true) }}:
ConnectedServiceName: 'MicroBuild Signing Task (DevDiv)'
${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
ConnectedPMEServiceName: beb8cb23-b303-4c95-ab26-9e44bc958d39
${{ else }}:
ConnectedPMEServiceName: c24de2a5-cc7a-493d-95e4-8e5ff5cad2bc
env:
TeamName: $(_TeamName)
MicroBuildOutputFolderOverride: $(Agent.TempDirectory)/MicroBuild
SYSTEM_ACCESSTOKEN: $(System.AccessToken)
continueOnError: ${{ parameters.continueOnError }}
condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT'), eq(variables['_SignType'], 'real'))

17
eng/common/core-templates/steps/publish-logs.yml

@ -12,29 +12,31 @@ steps:
inputs: inputs:
targetType: inline targetType: inline
script: | script: |
New-Item -ItemType Directory $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/ New-Item -ItemType Directory $(System.DefaultWorkingDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
Move-Item -Path $(Build.SourcesDirectory)/artifacts/log/Debug/* $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/ Move-Item -Path $(System.DefaultWorkingDirectory)/artifacts/log/Debug/* $(System.DefaultWorkingDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
continueOnError: true continueOnError: true
condition: always() condition: always()
- task: PowerShell@2 - task: PowerShell@2
displayName: Redact Logs displayName: Redact Logs
inputs: inputs:
filePath: $(Build.SourcesDirectory)/eng/common/post-build/redact-logs.ps1 filePath: $(System.DefaultWorkingDirectory)/eng/common/post-build/redact-logs.ps1
# For now this needs to have explicit list of all sensitive data. Taken from eng/publishing/v3/publish.yml # For now this needs to have explicit list of all sensitive data. Taken from eng/publishing/v3/publish.yml
# Sensitive data can as well be added to $(Build.SourcesDirectory)/eng/BinlogSecretsRedactionFile.txt' # Sensitive data can as well be added to $(System.DefaultWorkingDirectory)/eng/BinlogSecretsRedactionFile.txt'
# If the file exists - sensitive data for redaction will be sourced from it # If the file exists - sensitive data for redaction will be sourced from it
# (single entry per line, lines starting with '# ' are considered comments and skipped) # (single entry per line, lines starting with '# ' are considered comments and skipped)
arguments: -InputPath '$(Build.SourcesDirectory)/PostBuildLogs' arguments: -InputPath '$(System.DefaultWorkingDirectory)/PostBuildLogs'
-BinlogToolVersion ${{parameters.BinlogToolVersion}} -BinlogToolVersion ${{parameters.BinlogToolVersion}}
-TokensFilePath '$(Build.SourcesDirectory)/eng/BinlogSecretsRedactionFile.txt' -TokensFilePath '$(System.DefaultWorkingDirectory)/eng/BinlogSecretsRedactionFile.txt'
'$(publishing-dnceng-devdiv-code-r-build-re)' '$(publishing-dnceng-devdiv-code-r-build-re)'
'$(MaestroAccessToken)' '$(MaestroAccessToken)'
'$(dn-bot-all-orgs-artifact-feeds-rw)' '$(dn-bot-all-orgs-artifact-feeds-rw)'
'$(akams-client-id)' '$(akams-client-id)'
'$(microsoft-symbol-server-pat)' '$(microsoft-symbol-server-pat)'
'$(symweb-symbol-server-pat)' '$(symweb-symbol-server-pat)'
'$(dnceng-symbol-server-pat)'
'$(dn-bot-all-orgs-build-rw-code-rw)' '$(dn-bot-all-orgs-build-rw-code-rw)'
'$(System.AccessToken)'
${{parameters.CustomSensitiveDataList}} ${{parameters.CustomSensitiveDataList}}
continueOnError: true continueOnError: true
condition: always() condition: always()
@ -42,9 +44,10 @@ steps:
- task: CopyFiles@2 - task: CopyFiles@2
displayName: Gather post build logs displayName: Gather post build logs
inputs: inputs:
SourceFolder: '$(Build.SourcesDirectory)/PostBuildLogs' SourceFolder: '$(System.DefaultWorkingDirectory)/PostBuildLogs'
Contents: '**' Contents: '**'
TargetFolder: '$(Build.ArtifactStagingDirectory)/PostBuildLogs' TargetFolder: '$(Build.ArtifactStagingDirectory)/PostBuildLogs'
condition: always()
- template: /eng/common/core-templates/steps/publish-build-artifacts.yml - template: /eng/common/core-templates/steps/publish-build-artifacts.yml
parameters: parameters:

82
eng/common/core-templates/steps/source-build.yml

@ -19,25 +19,12 @@ steps:
set -x set -x
df -h df -h
# If file changes are detected, set CopyWipIntoInnerSourceBuildRepo to copy the WIP changes into the inner source build repo.
internalRestoreArgs=
if ! git diff --quiet; then
internalRestoreArgs='/p:CopyWipIntoInnerSourceBuildRepo=true'
# The 'Copy WIP' feature of source build uses git stash to apply changes from the original repo.
# This only works if there is a username/email configured, which won't be the case in most CI runs.
git config --get user.email
if [ $? -ne 0 ]; then
git config user.email dn-bot@microsoft.com
git config user.name dn-bot
fi
fi
# If building on the internal project, the internal storage variable may be available (usually only if needed) # If building on the internal project, the internal storage variable may be available (usually only if needed)
# In that case, add variables to allow the download of internal runtimes if the specified versions are not found # In that case, add variables to allow the download of internal runtimes if the specified versions are not found
# in the default public locations. # in the default public locations.
internalRuntimeDownloadArgs= internalRuntimeDownloadArgs=
if [ '$(dotnetbuilds-internal-container-read-token-base64)' != '$''(dotnetbuilds-internal-container-read-token-base64)' ]; then if [ '$(dotnetbuilds-internal-container-read-token-base64)' != '$''(dotnetbuilds-internal-container-read-token-base64)' ]; then
internalRuntimeDownloadArgs='/p:DotNetRuntimeSourceFeed=https://dotnetbuilds.blob.core.windows.net/internal /p:DotNetRuntimeSourceFeedKey=$(dotnetbuilds-internal-container-read-token-base64) --runtimesourcefeed https://dotnetbuilds.blob.core.windows.net/internal --runtimesourcefeedkey $(dotnetbuilds-internal-container-read-token-base64)' internalRuntimeDownloadArgs='/p:DotNetRuntimeSourceFeed=https://ci.dot.net/internal /p:DotNetRuntimeSourceFeedKey=$(dotnetbuilds-internal-container-read-token-base64) --runtimesourcefeed https://ci.dot.net/internal --runtimesourcefeedkey $(dotnetbuilds-internal-container-read-token-base64)'
fi fi
buildConfig=Release buildConfig=Release
@ -46,84 +33,33 @@ steps:
buildConfig='$(_BuildConfig)' buildConfig='$(_BuildConfig)'
fi fi
officialBuildArgs=
if [ '${{ and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}' = 'True' ]; then
officialBuildArgs='/p:DotNetPublishUsingPipelines=true /p:OfficialBuildId=$(BUILD.BUILDNUMBER)'
fi
targetRidArgs= targetRidArgs=
if [ '${{ parameters.platform.targetRID }}' != '' ]; then if [ '${{ parameters.platform.targetRID }}' != '' ]; then
targetRidArgs='/p:TargetRid=${{ parameters.platform.targetRID }}' targetRidArgs='/p:TargetRid=${{ parameters.platform.targetRID }}'
fi fi
runtimeOsArgs= portableBuildArgs=
if [ '${{ parameters.platform.runtimeOS }}' != '' ]; then if [ '${{ parameters.platform.portableBuild }}' != '' ]; then
runtimeOsArgs='/p:RuntimeOS=${{ parameters.platform.runtimeOS }}' portableBuildArgs='/p:PortableBuild=${{ parameters.platform.portableBuild }}'
fi
baseOsArgs=
if [ '${{ parameters.platform.baseOS }}' != '' ]; then
baseOsArgs='/p:BaseOS=${{ parameters.platform.baseOS }}'
fi
publishArgs=
if [ '${{ parameters.platform.skipPublishValidation }}' != 'true' ]; then
publishArgs='--publish'
fi
assetManifestFileName=SourceBuild_RidSpecific.xml
if [ '${{ parameters.platform.name }}' != '' ]; then
assetManifestFileName=SourceBuild_${{ parameters.platform.name }}.xml
fi fi
${{ coalesce(parameters.platform.buildScript, './build.sh') }} --ci \ ${{ coalesce(parameters.platform.buildScript, './build.sh') }} --ci \
--configuration $buildConfig \ --configuration $buildConfig \
--restore --build --pack $publishArgs -bl \ --restore --build --pack -bl \
$officialBuildArgs \ --source-build \
${{ parameters.platform.buildArguments }} \
$internalRuntimeDownloadArgs \ $internalRuntimeDownloadArgs \
$internalRestoreArgs \
$targetRidArgs \ $targetRidArgs \
$runtimeOsArgs \ $portableBuildArgs \
$baseOsArgs \
/p:SourceBuildNonPortable=${{ parameters.platform.nonPortable }} \
/p:ArcadeBuildFromSource=true \
/p:DotNetBuildSourceOnly=true \
/p:DotNetBuildRepo=true \
/p:AssetManifestFileName=$assetManifestFileName
displayName: Build displayName: Build
# Upload build logs for diagnosis.
- task: CopyFiles@2
displayName: Prepare BuildLogs staging directory
inputs:
SourceFolder: '$(Build.SourcesDirectory)'
Contents: |
**/*.log
**/*.binlog
artifacts/sb/prebuilt-report/**
TargetFolder: '$(Build.StagingDirectory)/BuildLogs'
CleanTargetFolder: true
continueOnError: true
condition: succeededOrFailed()
- template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
parameters: parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }} is1ESPipeline: ${{ parameters.is1ESPipeline }}
args: args:
displayName: Publish BuildLogs displayName: Publish BuildLogs
targetPath: '$(Build.StagingDirectory)/BuildLogs' targetPath: artifacts/log/${{ coalesce(variables._BuildConfig, 'Release') }}
artifactName: BuildLogs_SourceBuild_${{ parameters.platform.name }}_Attempt$(System.JobAttempt) artifactName: BuildLogs_SourceBuild_${{ parameters.platform.name }}_Attempt$(System.JobAttempt)
continueOnError: true continueOnError: true
condition: succeededOrFailed() condition: succeededOrFailed()
sbomEnabled: false # we don't need SBOM for logs sbomEnabled: false # we don't need SBOM for logs
# Manually inject component detection so that we can ignore the source build upstream cache, which contains
# a nupkg cache of input packages (a local feed).
# This path must match the upstream cache path in property 'CurrentRepoSourceBuiltNupkgCacheDir'
# in src\Microsoft.DotNet.Arcade.Sdk\tools\SourceBuild\SourceBuildArcade.targets
- template: /eng/common/core-templates/steps/component-governance.yml
parameters:
displayName: Component Detection (Exclude upstream cache)
is1ESPipeline: ${{ parameters.is1ESPipeline }}
componentGovernanceIgnoreDirectories: '$(Build.SourcesDirectory)/artifacts/sb/src/artifacts/obj/source-built-upstream-cache'
disableComponentGovernance: ${{ eq(variables['System.TeamProject'], 'public') }}

35
eng/common/core-templates/steps/source-index-stage1-publish.yml

@ -0,0 +1,35 @@
parameters:
sourceIndexUploadPackageVersion: 2.0.0-20250818.1
sourceIndexProcessBinlogPackageVersion: 1.0.1-20250818.1
sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json
binlogPath: artifacts/log/Debug/Build.binlog
steps:
- task: UseDotNet@2
displayName: "Source Index: Use .NET 9 SDK"
inputs:
packageType: sdk
version: 9.0.x
installationPath: $(Agent.TempDirectory)/dotnet
workingDirectory: $(Agent.TempDirectory)
- script: |
$(Agent.TempDirectory)/dotnet/dotnet tool install BinLogToSln --version ${{parameters.sourceIndexProcessBinlogPackageVersion}} --add-source ${{parameters.SourceIndexPackageSource}} --tool-path $(Agent.TempDirectory)/.source-index/tools
$(Agent.TempDirectory)/dotnet/dotnet tool install UploadIndexStage1 --version ${{parameters.sourceIndexUploadPackageVersion}} --add-source ${{parameters.SourceIndexPackageSource}} --tool-path $(Agent.TempDirectory)/.source-index/tools
displayName: "Source Index: Download netsourceindex Tools"
# Set working directory to temp directory so 'dotnet' doesn't try to use global.json and use the repo's sdk.
workingDirectory: $(Agent.TempDirectory)
- script: $(Agent.TempDirectory)/.source-index/tools/BinLogToSln -i ${{parameters.BinlogPath}} -r $(System.DefaultWorkingDirectory) -n $(Build.Repository.Name) -o .source-index/stage1output
displayName: "Source Index: Process Binlog into indexable sln"
- ${{ if and(ne(parameters.runAsPublic, 'true'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- task: AzureCLI@2
displayName: "Source Index: Upload Source Index stage1 artifacts to Azure"
inputs:
azureSubscription: 'SourceDotNet Stage1 Publish'
addSpnToEnvironment: true
scriptType: 'ps'
scriptLocation: 'inlineScript'
inlineScript: |
$(Agent.TempDirectory)/.source-index/tools/UploadIndexStage1 -i .source-index/stage1output -n $(Build.Repository.Name) -s netsourceindexstage1 -b stage1

0
eng/common/cross/arm/tizen-build-rootfs.sh

0
eng/common/cross/arm/tizen-fetch.sh

2
eng/common/cross/arm64/tizen/tizen.patch

@ -5,5 +5,5 @@ diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so
Use the shared library, but some functions are only in Use the shared library, but some functions are only in
the static library, so try that secondarily. */ the static library, so try that secondarily. */
OUTPUT_FORMAT(elf64-littleaarch64) OUTPUT_FORMAT(elf64-littleaarch64)
-GROUP ( /lib64/libc.so.6 /usr/lib64/libc_nonshared.a AS_NEEDED ( /lib/ld-linux-aarch64.so.1 ) ) -GROUP ( /lib64/libc.so.6 /usr/lib64/libc_nonshared.a AS_NEEDED ( /lib64/ld-linux-aarch64.so.1 ) )
+GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux-aarch64.so.1 ) ) +GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux-aarch64.so.1 ) )

49
eng/common/cross/build-android-rootfs.sh

@ -6,10 +6,11 @@ usage()
{ {
echo "Creates a toolchain and sysroot used for cross-compiling for Android." echo "Creates a toolchain and sysroot used for cross-compiling for Android."
echo echo
echo "Usage: $0 [BuildArch] [ApiLevel]" echo "Usage: $0 [BuildArch] [ApiLevel] [--ndk NDKVersion]"
echo echo
echo "BuildArch is the target architecture of Android. Currently only arm64 is supported." echo "BuildArch is the target architecture of Android. Currently only arm64 is supported."
echo "ApiLevel is the target Android API level. API levels usually match to Android releases. See https://source.android.com/source/build-numbers.html" echo "ApiLevel is the target Android API level. API levels usually match to Android releases. See https://source.android.com/source/build-numbers.html"
echo "NDKVersion is the version of Android NDK. The default is r21. See https://developer.android.com/ndk/downloads/revision_history"
echo echo
echo "By default, the toolchain and sysroot will be generated in cross/android-rootfs/toolchain/[BuildArch]. You can change this behavior" echo "By default, the toolchain and sysroot will be generated in cross/android-rootfs/toolchain/[BuildArch]. You can change this behavior"
echo "by setting the TOOLCHAIN_DIR environment variable" echo "by setting the TOOLCHAIN_DIR environment variable"
@ -25,10 +26,15 @@ __BuildArch=arm64
__AndroidArch=aarch64 __AndroidArch=aarch64
__AndroidToolchain=aarch64-linux-android __AndroidToolchain=aarch64-linux-android
for i in "$@" while :; do
do if [[ "$#" -le 0 ]]; then
lowerI="$(echo $i | tr "[:upper:]" "[:lower:]")" break
case $lowerI in fi
i=$1
lowerI="$(echo $i | tr "[:upper:]" "[:lower:]")"
case $lowerI in
-?|-h|--help) -?|-h|--help)
usage usage
exit 1 exit 1
@ -43,6 +49,10 @@ for i in "$@"
__AndroidArch=arm __AndroidArch=arm
__AndroidToolchain=arm-linux-androideabi __AndroidToolchain=arm-linux-androideabi
;; ;;
--ndk)
shift
__NDK_Version=$1
;;
*[0-9]) *[0-9])
__ApiLevel=$i __ApiLevel=$i
;; ;;
@ -50,8 +60,17 @@ for i in "$@"
__UnprocessedBuildArgs="$__UnprocessedBuildArgs $i" __UnprocessedBuildArgs="$__UnprocessedBuildArgs $i"
;; ;;
esac esac
shift
done done
if [[ "$__NDK_Version" == "r21" ]] || [[ "$__NDK_Version" == "r22" ]]; then
__NDK_File_Arch_Spec=-x86_64
__SysRoot=sysroot
else
__NDK_File_Arch_Spec=
__SysRoot=toolchains/llvm/prebuilt/linux-x86_64/sysroot
fi
# Obtain the location of the bash script to figure out where the root of the repo is. # Obtain the location of the bash script to figure out where the root of the repo is.
__ScriptBaseDir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" __ScriptBaseDir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
@ -78,6 +97,7 @@ fi
echo "Target API level: $__ApiLevel" echo "Target API level: $__ApiLevel"
echo "Target architecture: $__BuildArch" echo "Target architecture: $__BuildArch"
echo "NDK version: $__NDK_Version"
echo "NDK location: $__NDK_Dir" echo "NDK location: $__NDK_Dir"
echo "Target Toolchain location: $__ToolchainDir" echo "Target Toolchain location: $__ToolchainDir"
@ -85,8 +105,8 @@ echo "Target Toolchain location: $__ToolchainDir"
if [ ! -d $__NDK_Dir ]; then if [ ! -d $__NDK_Dir ]; then
echo Downloading the NDK into $__NDK_Dir echo Downloading the NDK into $__NDK_Dir
mkdir -p $__NDK_Dir mkdir -p $__NDK_Dir
wget -q --progress=bar:force:noscroll --show-progress https://dl.google.com/android/repository/android-ndk-$__NDK_Version-linux-x86_64.zip -O $__CrossDir/android-ndk-$__NDK_Version-linux-x86_64.zip wget -q --progress=bar:force:noscroll --show-progress https://dl.google.com/android/repository/android-ndk-$__NDK_Version-linux$__NDK_File_Arch_Spec.zip -O $__CrossDir/android-ndk-$__NDK_Version-linux.zip
unzip -q $__CrossDir/android-ndk-$__NDK_Version-linux-x86_64.zip -d $__CrossDir unzip -q $__CrossDir/android-ndk-$__NDK_Version-linux.zip -d $__CrossDir
fi fi
if [ ! -d $__lldb_Dir ]; then if [ ! -d $__lldb_Dir ]; then
@ -116,16 +136,11 @@ for path in $(wget -qO- https://packages.termux.dev/termux-main-21/dists/stable/
fi fi
done done
cp -R "$__TmpDir/data/data/com.termux/files/usr/"* "$__ToolchainDir/sysroot/usr/" cp -R "$__TmpDir/data/data/com.termux/files/usr/"* "$__ToolchainDir/$__SysRoot/usr/"
# Generate platform file for build.sh script to assign to __DistroRid # Generate platform file for build.sh script to assign to __DistroRid
echo "Generating platform file..." echo "Generating platform file..."
echo "RID=android.${__ApiLevel}-${__BuildArch}" > $__ToolchainDir/sysroot/android_platform echo "RID=android.${__ApiLevel}-${__BuildArch}" > $__ToolchainDir/$__SysRoot/android_platform
echo "Now to build coreclr, libraries and installers; run:" echo "Now to build coreclr, libraries and host; run:"
echo ROOTFS_DIR=\$\(realpath $__ToolchainDir/sysroot\) ./build.sh --cross --arch $__BuildArch \ echo ROOTFS_DIR=$(realpath $__ToolchainDir/$__SysRoot) ./build.sh clr+libs+host --cross --arch $__BuildArch
--subsetCategory coreclr
echo ROOTFS_DIR=\$\(realpath $__ToolchainDir/sysroot\) ./build.sh --cross --arch $__BuildArch \
--subsetCategory libraries
echo ROOTFS_DIR=\$\(realpath $__ToolchainDir/sysroot\) ./build.sh --cross --arch $__BuildArch \
--subsetCategory installer

235
eng/common/cross/build-rootfs.sh

@ -5,7 +5,7 @@ set -e
usage() usage()
{ {
echo "Usage: $0 [BuildArch] [CodeName] [lldbx.y] [llvmx[.y]] [--skipunmount] --rootfsdir <directory>]" echo "Usage: $0 [BuildArch] [CodeName] [lldbx.y] [llvmx[.y]] [--skipunmount] --rootfsdir <directory>]"
echo "BuildArch can be: arm(default), arm64, armel, armv6, ppc64le, riscv64, s390x, x64, x86" echo "BuildArch can be: arm(default), arm64, armel, armv6, loongarch64, ppc64le, riscv64, s390x, x64, x86"
echo "CodeName - optional, Code name for Linux, can be: xenial(default), zesty, bionic, alpine" echo "CodeName - optional, Code name for Linux, can be: xenial(default), zesty, bionic, alpine"
echo " for alpine can be specified with version: alpineX.YY or alpineedge" echo " for alpine can be specified with version: alpineX.YY or alpineedge"
echo " for FreeBSD can be: freebsd13, freebsd14" echo " for FreeBSD can be: freebsd13, freebsd14"
@ -15,6 +15,7 @@ usage()
echo "llvmx[.y] - optional, LLVM version for LLVM related packages." echo "llvmx[.y] - optional, LLVM version for LLVM related packages."
echo "--skipunmount - optional, will skip the unmount of rootfs folder." echo "--skipunmount - optional, will skip the unmount of rootfs folder."
echo "--skipsigcheck - optional, will skip package signature checks (allowing untrusted packages)." echo "--skipsigcheck - optional, will skip package signature checks (allowing untrusted packages)."
echo "--skipemulation - optional, will skip qemu and debootstrap requirement when building environment for debian based systems."
echo "--use-mirror - optional, use mirror URL to fetch resources, when available." echo "--use-mirror - optional, use mirror URL to fetch resources, when available."
echo "--jobs N - optional, restrict to N jobs." echo "--jobs N - optional, restrict to N jobs."
exit 1 exit 1
@ -52,28 +53,27 @@ __UbuntuPackages+=" symlinks"
__UbuntuPackages+=" libicu-dev" __UbuntuPackages+=" libicu-dev"
__UbuntuPackages+=" liblttng-ust-dev" __UbuntuPackages+=" liblttng-ust-dev"
__UbuntuPackages+=" libunwind8-dev" __UbuntuPackages+=" libunwind8-dev"
__UbuntuPackages+=" libnuma-dev"
__AlpinePackages+=" gettext-dev" __AlpinePackages+=" gettext-dev"
__AlpinePackages+=" icu-dev" __AlpinePackages+=" icu-dev"
__AlpinePackages+=" libunwind-dev" __AlpinePackages+=" libunwind-dev"
__AlpinePackages+=" lttng-ust-dev" __AlpinePackages+=" lttng-ust-dev"
__AlpinePackages+=" compiler-rt" __AlpinePackages+=" compiler-rt"
__AlpinePackages+=" numactl-dev"
# runtime libraries' dependencies # runtime libraries' dependencies
__UbuntuPackages+=" libcurl4-openssl-dev" __UbuntuPackages+=" libcurl4-openssl-dev"
__UbuntuPackages+=" libkrb5-dev" __UbuntuPackages+=" libkrb5-dev"
__UbuntuPackages+=" libssl-dev" __UbuntuPackages+=" libssl-dev"
__UbuntuPackages+=" zlib1g-dev" __UbuntuPackages+=" zlib1g-dev"
__UbuntuPackages+=" libbrotli-dev"
__AlpinePackages+=" curl-dev" __AlpinePackages+=" curl-dev"
__AlpinePackages+=" krb5-dev" __AlpinePackages+=" krb5-dev"
__AlpinePackages+=" openssl-dev" __AlpinePackages+=" openssl-dev"
__AlpinePackages+=" zlib-dev" __AlpinePackages+=" zlib-dev"
__FreeBSDBase="13.3-RELEASE" __FreeBSDBase="13.4-RELEASE"
__FreeBSDPkg="1.17.0" __FreeBSDPkg="1.21.3"
__FreeBSDABI="13" __FreeBSDABI="13"
__FreeBSDPackages="libunwind" __FreeBSDPackages="libunwind"
__FreeBSDPackages+=" icu" __FreeBSDPackages+=" icu"
@ -91,18 +91,18 @@ __HaikuPackages="gcc_syslibs"
__HaikuPackages+=" gcc_syslibs_devel" __HaikuPackages+=" gcc_syslibs_devel"
__HaikuPackages+=" gmp" __HaikuPackages+=" gmp"
__HaikuPackages+=" gmp_devel" __HaikuPackages+=" gmp_devel"
__HaikuPackages+=" icu66" __HaikuPackages+=" icu[0-9]+"
__HaikuPackages+=" icu66_devel" __HaikuPackages+=" icu[0-9]*_devel"
__HaikuPackages+=" krb5" __HaikuPackages+=" krb5"
__HaikuPackages+=" krb5_devel" __HaikuPackages+=" krb5_devel"
__HaikuPackages+=" libiconv" __HaikuPackages+=" libiconv"
__HaikuPackages+=" libiconv_devel" __HaikuPackages+=" libiconv_devel"
__HaikuPackages+=" llvm12_libunwind" __HaikuPackages+=" llvm[0-9]*_libunwind"
__HaikuPackages+=" llvm12_libunwind_devel" __HaikuPackages+=" llvm[0-9]*_libunwind_devel"
__HaikuPackages+=" mpfr" __HaikuPackages+=" mpfr"
__HaikuPackages+=" mpfr_devel" __HaikuPackages+=" mpfr_devel"
__HaikuPackages+=" openssl" __HaikuPackages+=" openssl3"
__HaikuPackages+=" openssl_devel" __HaikuPackages+=" openssl3_devel"
__HaikuPackages+=" zlib" __HaikuPackages+=" zlib"
__HaikuPackages+=" zlib_devel" __HaikuPackages+=" zlib_devel"
@ -128,10 +128,12 @@ __AlpineKeys='
616adfeb:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAq0BFD1D4lIxQcsqEpQzU\npNCYM3aP1V/fxxVdT4DWvSI53JHTwHQamKdMWtEXetWVbP5zSROniYKFXd/xrD9X\n0jiGHey3lEtylXRIPxe5s+wXoCmNLcJVnvTcDtwx/ne2NLHxp76lyc25At+6RgE6\nADjLVuoD7M4IFDkAsd8UQ8zM0Dww9SylIk/wgV3ZkifecvgUQRagrNUdUjR56EBZ\nraQrev4hhzOgwelT0kXCu3snbUuNY/lU53CoTzfBJ5UfEJ5pMw1ij6X0r5S9IVsy\nKLWH1hiO0NzU2c8ViUYCly4Fe9xMTFc6u2dy/dxf6FwERfGzETQxqZvSfrRX+GLj\n/QZAXiPg5178hT/m0Y3z5IGenIC/80Z9NCi+byF1WuJlzKjDcF/TU72zk0+PNM/H\nKuppf3JT4DyjiVzNC5YoWJT2QRMS9KLP5iKCSThwVceEEg5HfhQBRT9M6KIcFLSs\nmFjx9kNEEmc1E8hl5IR3+3Ry8G5/bTIIruz14jgeY9u5jhL8Vyyvo41jgt9sLHR1\n/J1TxKfkgksYev7PoX6/ZzJ1ksWKZY5NFoDXTNYUgzFUTOoEaOg3BAQKadb3Qbbq\nXIrxmPBdgrn9QI7NCgfnAY3Tb4EEjs3ON/BNyEhUENcXOH6I1NbcuBQ7g9P73kE4\nVORdoc8MdJ5eoKBpO8Ww8HECAwEAAQ== 616adfeb:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAq0BFD1D4lIxQcsqEpQzU\npNCYM3aP1V/fxxVdT4DWvSI53JHTwHQamKdMWtEXetWVbP5zSROniYKFXd/xrD9X\n0jiGHey3lEtylXRIPxe5s+wXoCmNLcJVnvTcDtwx/ne2NLHxp76lyc25At+6RgE6\nADjLVuoD7M4IFDkAsd8UQ8zM0Dww9SylIk/wgV3ZkifecvgUQRagrNUdUjR56EBZ\nraQrev4hhzOgwelT0kXCu3snbUuNY/lU53CoTzfBJ5UfEJ5pMw1ij6X0r5S9IVsy\nKLWH1hiO0NzU2c8ViUYCly4Fe9xMTFc6u2dy/dxf6FwERfGzETQxqZvSfrRX+GLj\n/QZAXiPg5178hT/m0Y3z5IGenIC/80Z9NCi+byF1WuJlzKjDcF/TU72zk0+PNM/H\nKuppf3JT4DyjiVzNC5YoWJT2QRMS9KLP5iKCSThwVceEEg5HfhQBRT9M6KIcFLSs\nmFjx9kNEEmc1E8hl5IR3+3Ry8G5/bTIIruz14jgeY9u5jhL8Vyyvo41jgt9sLHR1\n/J1TxKfkgksYev7PoX6/ZzJ1ksWKZY5NFoDXTNYUgzFUTOoEaOg3BAQKadb3Qbbq\nXIrxmPBdgrn9QI7NCgfnAY3Tb4EEjs3ON/BNyEhUENcXOH6I1NbcuBQ7g9P73kE4\nVORdoc8MdJ5eoKBpO8Ww8HECAwEAAQ==
616ae350:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAyduVzi1mWm+lYo2Tqt/0\nXkCIWrDNP1QBMVPrE0/ZlU2bCGSoo2Z9FHQKz/mTyMRlhNqTfhJ5qU3U9XlyGOPJ\npiM+b91g26pnpXJ2Q2kOypSgOMOPA4cQ42PkHBEqhuzssfj9t7x47ppS94bboh46\nxLSDRff/NAbtwTpvhStV3URYkxFG++cKGGa5MPXBrxIp+iZf9GnuxVdST5PGiVGP\nODL/b69sPJQNbJHVquqUTOh5Ry8uuD2WZuXfKf7/C0jC/ie9m2+0CttNu9tMciGM\nEyKG1/Xhk5iIWO43m4SrrT2WkFlcZ1z2JSf9Pjm4C2+HovYpihwwdM/OdP8Xmsnr\nDzVB4YvQiW+IHBjStHVuyiZWc+JsgEPJzisNY0Wyc/kNyNtqVKpX6dRhMLanLmy+\nf53cCSI05KPQAcGj6tdL+D60uKDkt+FsDa0BTAobZ31OsFVid0vCXtsbplNhW1IF\nHwsGXBTVcfXg44RLyL8Lk/2dQxDHNHzAUslJXzPxaHBLmt++2COa2EI1iWlvtznk\nOk9WP8SOAIj+xdqoiHcC4j72BOVVgiITIJNHrbppZCq6qPR+fgXmXa+sDcGh30m6\n9Wpbr28kLMSHiENCWTdsFij+NQTd5S47H7XTROHnalYDuF1RpS+DpQidT5tUimaT\nJZDr++FjKrnnijbyNF8b98UCAwEAAQ== 616ae350:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAyduVzi1mWm+lYo2Tqt/0\nXkCIWrDNP1QBMVPrE0/ZlU2bCGSoo2Z9FHQKz/mTyMRlhNqTfhJ5qU3U9XlyGOPJ\npiM+b91g26pnpXJ2Q2kOypSgOMOPA4cQ42PkHBEqhuzssfj9t7x47ppS94bboh46\nxLSDRff/NAbtwTpvhStV3URYkxFG++cKGGa5MPXBrxIp+iZf9GnuxVdST5PGiVGP\nODL/b69sPJQNbJHVquqUTOh5Ry8uuD2WZuXfKf7/C0jC/ie9m2+0CttNu9tMciGM\nEyKG1/Xhk5iIWO43m4SrrT2WkFlcZ1z2JSf9Pjm4C2+HovYpihwwdM/OdP8Xmsnr\nDzVB4YvQiW+IHBjStHVuyiZWc+JsgEPJzisNY0Wyc/kNyNtqVKpX6dRhMLanLmy+\nf53cCSI05KPQAcGj6tdL+D60uKDkt+FsDa0BTAobZ31OsFVid0vCXtsbplNhW1IF\nHwsGXBTVcfXg44RLyL8Lk/2dQxDHNHzAUslJXzPxaHBLmt++2COa2EI1iWlvtznk\nOk9WP8SOAIj+xdqoiHcC4j72BOVVgiITIJNHrbppZCq6qPR+fgXmXa+sDcGh30m6\n9Wpbr28kLMSHiENCWTdsFij+NQTd5S47H7XTROHnalYDuF1RpS+DpQidT5tUimaT\nJZDr++FjKrnnijbyNF8b98UCAwEAAQ==
616db30d:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAnpUpyWDWjlUk3smlWeA0\nlIMW+oJ38t92CRLHH3IqRhyECBRW0d0aRGtq7TY8PmxjjvBZrxTNDpJT6KUk4LRm\na6A6IuAI7QnNK8SJqM0DLzlpygd7GJf8ZL9SoHSH+gFsYF67Cpooz/YDqWrlN7Vw\ntO00s0B+eXy+PCXYU7VSfuWFGK8TGEv6HfGMALLjhqMManyvfp8hz3ubN1rK3c8C\nUS/ilRh1qckdbtPvoDPhSbTDmfU1g/EfRSIEXBrIMLg9ka/XB9PvWRrekrppnQzP\nhP9YE3x/wbFc5QqQWiRCYyQl/rgIMOXvIxhkfe8H5n1Et4VAorkpEAXdsfN8KSVv\nLSMazVlLp9GYq5SUpqYX3KnxdWBgN7BJoZ4sltsTpHQ/34SXWfu3UmyUveWj7wp0\nx9hwsPirVI00EEea9AbP7NM2rAyu6ukcm4m6ATd2DZJIViq2es6m60AE6SMCmrQF\nwmk4H/kdQgeAELVfGOm2VyJ3z69fQuywz7xu27S6zTKi05Qlnohxol4wVb6OB7qG\nLPRtK9ObgzRo/OPumyXqlzAi/Yvyd1ZQk8labZps3e16bQp8+pVPiumWioMFJDWV\nGZjCmyMSU8V6MB6njbgLHoyg2LCukCAeSjbPGGGYhnKLm1AKSoJh3IpZuqcKCk5C\n8CM1S15HxV78s9dFntEqIokCAwEAAQ== 616db30d:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAnpUpyWDWjlUk3smlWeA0\nlIMW+oJ38t92CRLHH3IqRhyECBRW0d0aRGtq7TY8PmxjjvBZrxTNDpJT6KUk4LRm\na6A6IuAI7QnNK8SJqM0DLzlpygd7GJf8ZL9SoHSH+gFsYF67Cpooz/YDqWrlN7Vw\ntO00s0B+eXy+PCXYU7VSfuWFGK8TGEv6HfGMALLjhqMManyvfp8hz3ubN1rK3c8C\nUS/ilRh1qckdbtPvoDPhSbTDmfU1g/EfRSIEXBrIMLg9ka/XB9PvWRrekrppnQzP\nhP9YE3x/wbFc5QqQWiRCYyQl/rgIMOXvIxhkfe8H5n1Et4VAorkpEAXdsfN8KSVv\nLSMazVlLp9GYq5SUpqYX3KnxdWBgN7BJoZ4sltsTpHQ/34SXWfu3UmyUveWj7wp0\nx9hwsPirVI00EEea9AbP7NM2rAyu6ukcm4m6ATd2DZJIViq2es6m60AE6SMCmrQF\nwmk4H/kdQgeAELVfGOm2VyJ3z69fQuywz7xu27S6zTKi05Qlnohxol4wVb6OB7qG\nLPRtK9ObgzRo/OPumyXqlzAi/Yvyd1ZQk8labZps3e16bQp8+pVPiumWioMFJDWV\nGZjCmyMSU8V6MB6njbgLHoyg2LCukCAeSjbPGGGYhnKLm1AKSoJh3IpZuqcKCk5C\n8CM1S15HxV78s9dFntEqIokCAwEAAQ==
66ba20fe:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAtfB12w4ZgqsXWZDfUAV/\n6Y4aHUKIu3q4SXrNZ7CXF9nXoAVYrS7NAxJdAodsY3vPCN0g5O8DFXR+390LdOuQ\n+HsGKCc1k5tX5ZXld37EZNTNSbR0k+NKhd9h6X3u6wqPOx7SIKxwAQR8qeeFq4pP\nrt9GAGlxtuYgzIIcKJPwE0dZlcBCg+GnptCUZXp/38BP1eYC+xTXSL6Muq1etYfg\nodXdb7Yl+2h1IHuOwo5rjgY5kpY7GcAs8AjGk3lDD/av60OTYccknH0NCVSmPoXK\nvrxDBOn0LQRNBLcAfnTKgHrzy0Q5h4TNkkyTgxkoQw5ObDk9nnabTxql732yy9BY\ns+hM9+dSFO1HKeVXreYSA2n1ndF18YAvAumzgyqzB7I4pMHXq1kC/8bONMJxwSkS\nYm6CoXKyavp7RqGMyeVpRC7tV+blkrrUml0BwNkxE+XnwDRB3xDV6hqgWe0XrifD\nYTfvd9ScZQP83ip0r4IKlq4GMv/R5shcCRJSkSZ6QSGshH40JYSoiwJf5FHbj9ND\n7do0UAqebWo4yNx63j/wb2ULorW3AClv0BCFSdPsIrCStiGdpgJDBR2P2NZOCob3\nG9uMj+wJD6JJg2nWqNJxkANXX37Qf8plgzssrhrgOvB0fjjS7GYhfkfmZTJ0wPOw\nA8+KzFseBh4UFGgue78KwgkCAwEAAQ==
' '
__Keyring= __Keyring=
__KeyringFile="/usr/share/keyrings/ubuntu-archive-keyring.gpg" __KeyringFile="/usr/share/keyrings/ubuntu-archive-keyring.gpg"
__SkipSigCheck=0 __SkipSigCheck=0
__SkipEmulation=0
__UseMirror=0 __UseMirror=0
__UnprocessedBuildArgs= __UnprocessedBuildArgs=
@ -162,9 +164,13 @@ while :; do
armel) armel)
__BuildArch=armel __BuildArch=armel
__UbuntuArch=armel __UbuntuArch=armel
__UbuntuRepo="http://ftp.debian.org/debian/" __UbuntuRepo="http://archive.debian.org/debian/"
__CodeName=jessie __CodeName=buster
__KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg" __KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg"
__LLDB_Package="liblldb-6.0-dev"
__UbuntuPackages="${__UbuntuPackages// libomp-dev/}"
__UbuntuPackages="${__UbuntuPackages// libomp5/}"
__UbuntuSuites=
;; ;;
armv6) armv6)
__BuildArch=armv6 __BuildArch=armv6
@ -180,6 +186,18 @@ while :; do
__Keyring="--keyring $__KeyringFile" __Keyring="--keyring $__KeyringFile"
fi fi
;; ;;
loongarch64)
__BuildArch=loongarch64
__AlpineArch=loongarch64
__QEMUArch=loongarch64
__UbuntuArch=loong64
__UbuntuSuites=unreleased
__LLDB_Package="liblldb-19-dev"
if [[ "$__CodeName" == "sid" ]]; then
__UbuntuRepo="http://ftp.ports.debian.org/debian-ports/"
fi
;;
riscv64) riscv64)
__BuildArch=riscv64 __BuildArch=riscv64
__AlpineArch=riscv64 __AlpineArch=riscv64
@ -264,44 +282,21 @@ while :; do
;; ;;
xenial) # Ubuntu 16.04 xenial) # Ubuntu 16.04
if [[ "$__CodeName" != "jessie" ]]; then __CodeName=xenial
__CodeName=xenial
fi
;;
zesty) # Ubuntu 17.04
if [[ "$__CodeName" != "jessie" ]]; then
__CodeName=zesty
fi
;; ;;
bionic) # Ubuntu 18.04 bionic) # Ubuntu 18.04
if [[ "$__CodeName" != "jessie" ]]; then __CodeName=bionic
__CodeName=bionic
fi
;; ;;
focal) # Ubuntu 20.04 focal) # Ubuntu 20.04
if [[ "$__CodeName" != "jessie" ]]; then __CodeName=focal
__CodeName=focal
fi
;; ;;
jammy) # Ubuntu 22.04 jammy) # Ubuntu 22.04
if [[ "$__CodeName" != "jessie" ]]; then __CodeName=jammy
__CodeName=jammy
fi
;; ;;
noble) # Ubuntu 24.04 noble) # Ubuntu 24.04
if [[ "$__CodeName" != "jessie" ]]; then __CodeName=noble
__CodeName=noble if [[ -z "$__LLDB_Package" ]]; then
fi __LLDB_Package="liblldb-19-dev"
if [[ -n "$__LLDB_Package" ]]; then
__LLDB_Package="liblldb-18-dev"
fi
;;
jessie) # Debian 8
__CodeName=jessie
__KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg"
if [[ -z "$__UbuntuRepo" ]]; then
__UbuntuRepo="http://ftp.debian.org/debian/"
fi fi
;; ;;
stretch) # Debian 9 stretch) # Debian 9
@ -319,7 +314,7 @@ while :; do
__KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg" __KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg"
if [[ -z "$__UbuntuRepo" ]]; then if [[ -z "$__UbuntuRepo" ]]; then
__UbuntuRepo="http://ftp.debian.org/debian/" __UbuntuRepo="http://archive.debian.org/debian/"
fi fi
;; ;;
bullseye) # Debian 11 bullseye) # Debian 11
@ -340,10 +335,28 @@ while :; do
;; ;;
sid) # Debian sid sid) # Debian sid
__CodeName=sid __CodeName=sid
__KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg" __UbuntuSuites=
if [[ -z "$__UbuntuRepo" ]]; then # Debian-Ports architectures need different values
__UbuntuRepo="http://ftp.debian.org/debian/" case "$__UbuntuArch" in
amd64|arm64|armel|armhf|i386|mips64el|ppc64el|riscv64|s390x)
__KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg"
if [[ -z "$__UbuntuRepo" ]]; then
__UbuntuRepo="http://ftp.debian.org/debian/"
fi
;;
*)
__KeyringFile="/usr/share/keyrings/debian-ports-archive-keyring.gpg"
if [[ -z "$__UbuntuRepo" ]]; then
__UbuntuRepo="http://ftp.ports.debian.org/debian-ports/"
fi
;;
esac
if [[ -e "$__KeyringFile" ]]; then
__Keyring="--keyring $__KeyringFile"
fi fi
;; ;;
tizen) tizen)
@ -370,7 +383,7 @@ while :; do
;; ;;
freebsd14) freebsd14)
__CodeName=freebsd __CodeName=freebsd
__FreeBSDBase="14.0-RELEASE" __FreeBSDBase="14.2-RELEASE"
__FreeBSDABI="14" __FreeBSDABI="14"
__SkipUnmount=1 __SkipUnmount=1
;; ;;
@ -388,6 +401,9 @@ while :; do
--skipsigcheck) --skipsigcheck)
__SkipSigCheck=1 __SkipSigCheck=1
;; ;;
--skipemulation)
__SkipEmulation=1
;;
--rootfsdir|-rootfsdir) --rootfsdir|-rootfsdir)
shift shift
__RootfsDir="$1" __RootfsDir="$1"
@ -420,16 +436,15 @@ case "$__AlpineVersion" in
elif [[ "$__AlpineArch" == "x86" ]]; then elif [[ "$__AlpineArch" == "x86" ]]; then
__AlpineVersion=3.17 # minimum version that supports lldb-dev __AlpineVersion=3.17 # minimum version that supports lldb-dev
__AlpinePackages+=" llvm15-libs" __AlpinePackages+=" llvm15-libs"
elif [[ "$__AlpineArch" == "riscv64" ]]; then elif [[ "$__AlpineArch" == "riscv64" || "$__AlpineArch" == "loongarch64" ]]; then
__AlpineVersion=3.21 # minimum version that supports lldb-dev
__AlpinePackages+=" llvm19-libs"
elif [[ -n "$__AlpineMajorVersion" ]]; then
# use whichever alpine version is provided and select the latest toolchain libs
__AlpineLlvmLibsLookup=1 __AlpineLlvmLibsLookup=1
__AlpineVersion=edge # minimum version with APKINDEX.tar.gz (packages archive)
else else
__AlpineVersion=3.13 # 3.13 to maximize compatibility __AlpineVersion=3.13 # 3.13 to maximize compatibility
__AlpinePackages+=" llvm10-libs" __AlpinePackages+=" llvm10-libs"
if [[ "$__AlpineArch" == "armv7" ]]; then
__AlpinePackages="${__AlpinePackages//numactl-dev/}"
fi
fi fi
esac esac
@ -439,15 +454,6 @@ if [[ "$__AlpineVersion" =~ 3\.1[345] ]]; then
__AlpinePackages="${__AlpinePackages/compiler-rt/compiler-rt-static}" __AlpinePackages="${__AlpinePackages/compiler-rt/compiler-rt-static}"
fi fi
if [[ "$__BuildArch" == "armel" ]]; then
__LLDB_Package="lldb-3.5-dev"
fi
if [[ "$__CodeName" == "xenial" && "$__UbuntuArch" == "armhf" ]]; then
# libnuma-dev is not available on armhf for xenial
__UbuntuPackages="${__UbuntuPackages//libnuma-dev/}"
fi
__UbuntuPackages+=" ${__LLDB_Package:-}" __UbuntuPackages+=" ${__LLDB_Package:-}"
if [[ -z "$__UbuntuRepo" ]]; then if [[ -z "$__UbuntuRepo" ]]; then
@ -512,11 +518,6 @@ if [[ "$__CodeName" == "alpine" ]]; then
echo "$__ApkToolsSHA512SUM $__ApkToolsDir/apk.static" | sha512sum -c echo "$__ApkToolsSHA512SUM $__ApkToolsDir/apk.static" | sha512sum -c
chmod +x "$__ApkToolsDir/apk.static" chmod +x "$__ApkToolsDir/apk.static"
if [[ -f "/usr/bin/qemu-$__QEMUArch-static" ]]; then
mkdir -p "$__RootfsDir"/usr/bin
cp -v "/usr/bin/qemu-$__QEMUArch-static" "$__RootfsDir/usr/bin"
fi
if [[ "$__AlpineVersion" == "edge" ]]; then if [[ "$__AlpineVersion" == "edge" ]]; then
version=edge version=edge
else else
@ -536,6 +537,10 @@ if [[ "$__CodeName" == "alpine" ]]; then
__ApkSignatureArg="--keys-dir $__ApkKeysDir" __ApkSignatureArg="--keys-dir $__ApkKeysDir"
fi fi
if [[ "$__SkipEmulation" == "1" ]]; then
__NoEmulationArg="--no-scripts"
fi
# initialize DB # initialize DB
# shellcheck disable=SC2086 # shellcheck disable=SC2086
"$__ApkToolsDir/apk.static" \ "$__ApkToolsDir/apk.static" \
@ -557,7 +562,7 @@ if [[ "$__CodeName" == "alpine" ]]; then
"$__ApkToolsDir/apk.static" \ "$__ApkToolsDir/apk.static" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \ -X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \ -X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \
-U $__ApkSignatureArg --root "$__RootfsDir" --arch "$__AlpineArch" \ -U $__ApkSignatureArg --root "$__RootfsDir" --arch "$__AlpineArch" $__NoEmulationArg \
add $__AlpinePackages add $__AlpinePackages
rm -r "$__ApkToolsDir" rm -r "$__ApkToolsDir"
@ -573,7 +578,7 @@ elif [[ "$__CodeName" == "freebsd" ]]; then
curl -SL "https://download.freebsd.org/ftp/releases/${__FreeBSDArch}/${__FreeBSDMachineArch}/${__FreeBSDBase}/base.txz" | tar -C "$__RootfsDir" -Jxf - ./lib ./usr/lib ./usr/libdata ./usr/include ./usr/share/keys ./etc ./bin/freebsd-version curl -SL "https://download.freebsd.org/ftp/releases/${__FreeBSDArch}/${__FreeBSDMachineArch}/${__FreeBSDBase}/base.txz" | tar -C "$__RootfsDir" -Jxf - ./lib ./usr/lib ./usr/libdata ./usr/include ./usr/share/keys ./etc ./bin/freebsd-version
fi fi
echo "ABI = \"FreeBSD:${__FreeBSDABI}:${__FreeBSDMachineArch}\"; FINGERPRINTS = \"${__RootfsDir}/usr/share/keys\"; REPOS_DIR = [\"${__RootfsDir}/etc/pkg\"]; REPO_AUTOUPDATE = NO; RUN_SCRIPTS = NO;" > "${__RootfsDir}"/usr/local/etc/pkg.conf echo "ABI = \"FreeBSD:${__FreeBSDABI}:${__FreeBSDMachineArch}\"; FINGERPRINTS = \"${__RootfsDir}/usr/share/keys\"; REPOS_DIR = [\"${__RootfsDir}/etc/pkg\"]; REPO_AUTOUPDATE = NO; RUN_SCRIPTS = NO;" > "${__RootfsDir}"/usr/local/etc/pkg.conf
echo "FreeBSD: { url: \"pkg+http://pkg.FreeBSD.org/\${ABI}/quarterly\", mirror_type: \"srv\", signature_type: \"fingerprints\", fingerprints: \"${__RootfsDir}/usr/share/keys/pkg\", enabled: yes }" > "${__RootfsDir}"/etc/pkg/FreeBSD.conf echo "FreeBSD: { url: \"pkg+http://pkg.FreeBSD.org/\${ABI}/quarterly\", mirror_type: \"srv\", signature_type: \"fingerprints\", fingerprints: \"/usr/share/keys/pkg\", enabled: yes }" > "${__RootfsDir}"/etc/pkg/FreeBSD.conf
mkdir -p "$__RootfsDir"/tmp mkdir -p "$__RootfsDir"/tmp
# get and build package manager # get and build package manager
if [[ "$__hasWget" == 1 ]]; then if [[ "$__hasWget" == 1 ]]; then
@ -681,7 +686,7 @@ elif [[ "$__CodeName" == "haiku" ]]; then
ensureDownloadTool ensureDownloadTool
echo "Downloading Haiku package tool" echo "Downloading Haiku package tools"
git clone https://github.com/haiku/haiku-toolchains-ubuntu --depth 1 "$__RootfsDir/tmp/script" git clone https://github.com/haiku/haiku-toolchains-ubuntu --depth 1 "$__RootfsDir/tmp/script"
if [[ "$__hasWget" == 1 ]]; then if [[ "$__hasWget" == 1 ]]; then
wget -O "$__RootfsDir/tmp/download/hosttools.zip" "$("$__RootfsDir/tmp/script/fetch.sh" --hosttools)" wget -O "$__RootfsDir/tmp/download/hosttools.zip" "$("$__RootfsDir/tmp/script/fetch.sh" --hosttools)"
@ -691,34 +696,42 @@ elif [[ "$__CodeName" == "haiku" ]]; then
unzip -o "$__RootfsDir/tmp/download/hosttools.zip" -d "$__RootfsDir/tmp/bin" unzip -o "$__RootfsDir/tmp/download/hosttools.zip" -d "$__RootfsDir/tmp/bin"
DepotBaseUrl="https://depot.haiku-os.org/__api/v2/pkg/get-pkg" HaikuBaseUrl="https://eu.hpkg.haiku-os.org/haiku/master/$__HaikuArch/current"
HpkgBaseUrl="https://eu.hpkg.haiku-os.org/haiku/master/$__HaikuArch/current" HaikuPortsBaseUrl="https://eu.hpkg.haiku-os.org/haikuports/master/$__HaikuArch/current"
echo "Downloading HaikuPorts package repository index..."
if [[ "$__hasWget" == 1 ]]; then
wget -P "$__RootfsDir/tmp/download" "$HaikuPortsBaseUrl/repo"
else
curl -SLO --create-dirs --output-dir "$__RootfsDir/tmp/download" "$HaikuPortsBaseUrl/repo"
fi
# Download Haiku packages
echo "Downloading Haiku packages" echo "Downloading Haiku packages"
read -ra array <<<"$__HaikuPackages" read -ra array <<<"$__HaikuPackages"
for package in "${array[@]}"; do for package in "${array[@]}"; do
echo "Downloading $package..." echo "Downloading $package..."
# API documented here: https://github.com/haiku/haikudepotserver/blob/master/haikudepotserver-api2/src/main/resources/api2/pkg.yaml#L60 hpkgFilename="$(LD_LIBRARY_PATH="$__RootfsDir/tmp/bin" "$__RootfsDir/tmp/bin/package_repo" list -f "$__RootfsDir/tmp/download/repo" |
# The schema here: https://github.com/haiku/haikudepotserver/blob/master/haikudepotserver-api2/src/main/resources/api2/pkg.yaml#L598 grep -E "${package}-" | sort -V | tail -n 1 | xargs)"
if [ -z "$hpkgFilename" ]; then
>&2 echo "ERROR: package $package missing."
exit 1
fi
echo "Resolved filename: $hpkgFilename..."
hpkgDownloadUrl="$HaikuPortsBaseUrl/packages/$hpkgFilename"
if [[ "$__hasWget" == 1 ]]; then if [[ "$__hasWget" == 1 ]]; then
hpkgDownloadUrl="$(wget -qO- --post-data '{"name":"'"$package"'","repositorySourceCode":"haikuports_'$__HaikuArch'","versionType":"LATEST","naturalLanguageCode":"en"}' \
--header 'Content-Type:application/json' "$DepotBaseUrl" | jq -r '.result.versions[].hpkgDownloadURL')"
wget -P "$__RootfsDir/tmp/download" "$hpkgDownloadUrl" wget -P "$__RootfsDir/tmp/download" "$hpkgDownloadUrl"
else else
hpkgDownloadUrl="$(curl -sSL -XPOST --data '{"name":"'"$package"'","repositorySourceCode":"haikuports_'$__HaikuArch'","versionType":"LATEST","naturalLanguageCode":"en"}' \
--header 'Content-Type:application/json' "$DepotBaseUrl" | jq -r '.result.versions[].hpkgDownloadURL')"
curl -SLO --create-dirs --output-dir "$__RootfsDir/tmp/download" "$hpkgDownloadUrl" curl -SLO --create-dirs --output-dir "$__RootfsDir/tmp/download" "$hpkgDownloadUrl"
fi fi
done done
for package in haiku haiku_devel; do for package in haiku haiku_devel; do
echo "Downloading $package..." echo "Downloading $package..."
if [[ "$__hasWget" == 1 ]]; then if [[ "$__hasWget" == 1 ]]; then
hpkgVersion="$(wget -qO- "$HpkgBaseUrl" | sed -n 's/^.*version: "\([^"]*\)".*$/\1/p')" hpkgVersion="$(wget -qO- "$HaikuBaseUrl" | sed -n 's/^.*version: "\([^"]*\)".*$/\1/p')"
wget -P "$__RootfsDir/tmp/download" "$HpkgBaseUrl/packages/$package-$hpkgVersion-1-$__HaikuArch.hpkg" wget -P "$__RootfsDir/tmp/download" "$HaikuBaseUrl/packages/$package-$hpkgVersion-1-$__HaikuArch.hpkg"
else else
hpkgVersion="$(curl -sSL "$HpkgBaseUrl" | sed -n 's/^.*version: "\([^"]*\)".*$/\1/p')" hpkgVersion="$(curl -sSL "$HaikuBaseUrl" | sed -n 's/^.*version: "\([^"]*\)".*$/\1/p')"
curl -SLO --create-dirs --output-dir "$__RootfsDir/tmp/download" "$HpkgBaseUrl/packages/$package-$hpkgVersion-1-$__HaikuArch.hpkg" curl -SLO --create-dirs --output-dir "$__RootfsDir/tmp/download" "$HaikuBaseUrl/packages/$package-$hpkgVersion-1-$__HaikuArch.hpkg"
fi fi
done done
@ -744,25 +757,67 @@ elif [[ "$__CodeName" == "haiku" ]]; then
popd popd
rm -rf "$__RootfsDir/tmp" rm -rf "$__RootfsDir/tmp"
elif [[ -n "$__CodeName" ]]; then elif [[ -n "$__CodeName" ]]; then
__Suites="$__CodeName $(for suite in $__UbuntuSuites; do echo -n "$__CodeName-$suite "; done)"
if [[ "$__SkipEmulation" == "1" ]]; then
if [[ -z "$AR" ]]; then
if command -v ar &>/dev/null; then
AR="$(command -v ar)"
elif command -v llvm-ar &>/dev/null; then
AR="$(command -v llvm-ar)"
else
echo "Unable to find ar or llvm-ar on PATH, add them to PATH or set AR environment variable pointing to the available AR tool"
exit 1
fi
fi
PYTHON=${PYTHON_EXECUTABLE:-python3}
# shellcheck disable=SC2086,SC2046
echo running "$PYTHON" "$__CrossDir/install-debs.py" --arch "$__UbuntuArch" --mirror "$__UbuntuRepo" --rootfsdir "$__RootfsDir" --artool "$AR" \
$(for suite in $__Suites; do echo -n "--suite $suite "; done) \
$__UbuntuPackages
# shellcheck disable=SC2086,SC2046
"$PYTHON" "$__CrossDir/install-debs.py" --arch "$__UbuntuArch" --mirror "$__UbuntuRepo" --rootfsdir "$__RootfsDir" --artool "$AR" \
$(for suite in $__Suites; do echo -n "--suite $suite "; done) \
$__UbuntuPackages
exit 0
fi
__UpdateOptions=
if [[ "$__SkipSigCheck" == "0" ]]; then if [[ "$__SkipSigCheck" == "0" ]]; then
__Keyring="$__Keyring --force-check-gpg" __Keyring="$__Keyring --force-check-gpg"
else
__Keyring=
__UpdateOptions="--allow-unauthenticated --allow-insecure-repositories"
fi fi
# shellcheck disable=SC2086 # shellcheck disable=SC2086
echo running debootstrap "--variant=minbase" $__Keyring --arch "$__UbuntuArch" "$__CodeName" "$__RootfsDir" "$__UbuntuRepo" echo running debootstrap "--variant=minbase" $__Keyring --arch "$__UbuntuArch" "$__CodeName" "$__RootfsDir" "$__UbuntuRepo"
debootstrap "--variant=minbase" $__Keyring --arch "$__UbuntuArch" "$__CodeName" "$__RootfsDir" "$__UbuntuRepo"
# shellcheck disable=SC2086
if ! debootstrap "--variant=minbase" $__Keyring --arch "$__UbuntuArch" "$__CodeName" "$__RootfsDir" "$__UbuntuRepo"; then
echo "debootstrap failed! dumping debootstrap.log"
cat "$__RootfsDir/debootstrap/debootstrap.log"
exit 1
fi
rm -rf "$__RootfsDir"/etc/apt/*.{sources,list} "$__RootfsDir"/etc/apt/sources.list.d
mkdir -p "$__RootfsDir/etc/apt/sources.list.d/" mkdir -p "$__RootfsDir/etc/apt/sources.list.d/"
# shellcheck disable=SC2086
cat > "$__RootfsDir/etc/apt/sources.list.d/$__CodeName.sources" <<EOF cat > "$__RootfsDir/etc/apt/sources.list.d/$__CodeName.sources" <<EOF
Types: deb Types: deb
URIs: $__UbuntuRepo URIs: $__UbuntuRepo
Suites: $__CodeName $(echo $__UbuntuSuites | xargs -n 1 | xargs -I {} echo -n "$__CodeName-{} ") Suites: $__Suites
Components: main universe Components: main universe
Signed-By: $__KeyringFile Signed-By: $__KeyringFile
EOF EOF
chroot "$__RootfsDir" apt-get update # shellcheck disable=SC2086
chroot "$__RootfsDir" apt-get update $__UpdateOptions
chroot "$__RootfsDir" apt-get -f -y install chroot "$__RootfsDir" apt-get -f -y install
# shellcheck disable=SC2086 # shellcheck disable=SC2086
chroot "$__RootfsDir" apt-get -y install $__UbuntuPackages chroot "$__RootfsDir" apt-get -y install $__UbuntuPackages
@ -772,12 +827,6 @@ EOF
if [[ "$__SkipUnmount" == "0" ]]; then if [[ "$__SkipUnmount" == "0" ]]; then
umount "$__RootfsDir"/* || true umount "$__RootfsDir"/* || true
fi fi
if [[ "$__BuildArch" == "armel" && "$__CodeName" == "jessie" ]]; then
pushd "$__RootfsDir"
patch -p1 < "$__CrossDir/$__BuildArch/armel.jessie.patch"
popd
fi
elif [[ "$__Tizen" == "tizen" ]]; then elif [[ "$__Tizen" == "tizen" ]]; then
ROOTFS_DIR="$__RootfsDir" "$__CrossDir/tizen-build-rootfs.sh" "$__BuildArch" ROOTFS_DIR="$__RootfsDir" "$__CrossDir/tizen-build-rootfs.sh" "$__BuildArch"
else else

334
eng/common/cross/install-debs.py

@ -0,0 +1,334 @@
#!/usr/bin/env python3
import argparse
import asyncio
import aiohttp
import gzip
import os
import re
import shutil
import subprocess
import sys
import tarfile
import tempfile
import zstandard
from collections import deque
from functools import cmp_to_key
async def download_file(session, url, dest_path, max_retries=3, retry_delay=2, timeout=60):
    """Download a single URL to dest_path, retrying transient failures.

    Retries only network-level errors (timeouts, client errors, cancellation);
    a non-200 HTTP status is treated as permanent and aborts immediately.
    Failures are reported on stdout rather than raised.
    """
    for attempt in range(max_retries):
        try:
            async with session.get(url, timeout=aiohttp.ClientTimeout(total=timeout)) as response:
                if response.status != 200:
                    print(f"Failed to download {url}, Status Code: {response.status}")
                    break
                # Open before reading so the destination is created even if the
                # read is interrupted (matches prior behavior).
                with open(dest_path, "wb") as f:
                    f.write(await response.read())
                print(f"Downloaded {url} at {dest_path}")
                return
        except (asyncio.CancelledError, asyncio.TimeoutError, aiohttp.ClientError) as e:
            print(f"Error downloading {url}: {type(e).__name__} - {e}. Retrying...")
            await asyncio.sleep(retry_delay)
    print(f"Failed to download {url} after {max_retries} attempts.")
async def download_deb_files_parallel(mirror, packages, tmp_dir):
    """Download the .deb files for all given packages concurrently.

    packages: mapping of package name -> index fields; each entry's "Filename"
    is the pool-relative path from the Packages index (e.g. "pool/main/x/x.deb")
    and is appended to the mirror base URL. Entries without a Filename are
    skipped. Files land in tmp_dir under their basename.
    """
    os.makedirs(tmp_dir, exist_ok=True)
    tasks = []
    timeout = aiohttp.ClientTimeout(total=60)
    async with aiohttp.ClientSession(timeout=timeout) as session:
        for pkg, info in packages.items():
            filename = info.get("Filename")
            if filename:
                # BUG FIX: the URL must be mirror + pool-relative Filename;
                # previously a placeholder ("(unknown)") was interpolated here.
                url = f"{mirror}/{filename}"
                dest_path = os.path.join(tmp_dir, os.path.basename(filename))
                tasks.append(asyncio.create_task(download_file(session, url, dest_path)))
        await asyncio.gather(*tasks)
async def download_package_index_parallel(mirror, arch, suites):
    """Fetch Packages.gz indexes for every suite/component pair, in memory.

    Queries the "main" and "universe" components of each suite, decompresses
    each index, and returns all successfully fetched indexes concatenated
    with blank-line separators (the natural record separator of the format).
    Missing indexes and fetch errors are silently dropped.
    """
    timeout = aiohttp.ClientTimeout(total=60)
    async with aiohttp.ClientSession(timeout=timeout) as session:
        fetches = [
            fetch_and_decompress(session, f"{mirror}/dists/{suite}/{component}/binary-{arch}/Packages.gz")
            for suite in suites
            for component in ["main", "universe"]
        ]
        results = await asyncio.gather(*fetches, return_exceptions=True)
    # Keep only string payloads (None / exceptions are failed fetches).
    return "\n\n".join(r for r in results if isinstance(r, str))
async def fetch_and_decompress(session, url):
    """Fetch one Packages.gz URL and return its decompressed text.

    Returns the UTF-8 decoded index on success, or None when the URL is
    missing (non-200) or any error occurs; errors are reported on stdout.
    """
    try:
        async with session.get(url) as response:
            if response.status != 200:
                print(f"Skipped index: {url} (doesn't exist)")
                return None
            compressed_data = await response.read()
            text = gzip.decompress(compressed_data).decode('utf-8')
            print(f"Downloaded index: {url}")
            return text
    except Exception as e:
        print(f"Error fetching {url}: {e}")
def parse_debian_version(version):
    """Parse a Debian package version into (epoch, upstream, revision).

    Per Debian policy the format is [epoch:]upstream[-revision] where the
    revision is everything after the LAST hyphen (the upstream part may itself
    contain hyphens when a revision is present). The previous regex split at
    the FIRST hyphen, mis-parsing versions like "1.0-2-3".

    Returns: (int epoch, str upstream, str revision) — epoch defaults to 0,
    revision to "". Raises ValueError when no upstream part can be found.
    """
    # (.+?) is lazy and -([^-]+)$ forbids hyphens in the revision, so the
    # upstream/revision split lands on the last hyphen.
    match = re.match(r'^(?:(\d+):)?(.+?)(?:-([^-]+))?$', version)
    if not match:
        raise ValueError(f"Invalid Debian version format: {version}")
    epoch, upstream, revision = match.groups()
    return int(epoch) if epoch else 0, upstream, revision or ""
def compare_upstream_version(v1, v2):
    """Compare upstream (or revision) strings with a simplified Debian ordering.

    Versions are tokenized into alternating digit and letter runs; digit runs
    compare numerically, letter runs lexically. When token kinds differ at the
    same position, the numeric token sorts higher. Ties are broken by token
    count. Returns a negative, zero, or positive integer.
    (Note: this intentionally omits full Debian rules such as '~' ordering.)
    """
    def tokenize(version):
        # re.split with a capturing group keeps the matched runs; drop the
        # empty strings the split produces between adjacent matches.
        parts = re.split(r'([0-9]+|[A-Za-z]+)', version)
        return [int(p) if p.isdigit() else p for p in parts if p]

    left = tokenize(v1)
    right = tokenize(v2)
    for a, b in zip(left, right):
        if type(a) != type(b):
            # Mixed kinds at the same position: the integer token wins.
            return 1 if isinstance(a, int) else -1
        if a != b:
            return (a > b) - (a < b)
    return len(left) - len(right)
def compare_debian_versions(version1, version2):
    """Compare two full Debian versions; negative/zero/positive result.

    Precedence: epoch first, then upstream version, then revision.
    """
    epoch1, upstream1, revision1 = parse_debian_version(version1)
    epoch2, upstream2, revision2 = parse_debian_version(version2)
    if epoch1 != epoch2:
        return epoch1 - epoch2
    # `or` falls through to the revision comparison only on an upstream tie.
    return compare_upstream_version(upstream1, upstream2) or \
        compare_upstream_version(revision1, revision2)
def resolve_dependencies(packages, aliases, desired_packages):
    """Breadth-first transitive dependency resolution.

    Each requested name is resolved either directly in `packages` or through
    the first provider recorded in `aliases`; an unresolvable name aborts the
    process (exit 1). Returns the resolved package names in discovery order,
    without duplicates. Dependency clauses keep only the package name (any
    version constraint like "(>= 1)" is ignored), and only dependencies that
    themselves appear in `packages` are followed.
    """
    resolved = []
    queue = deque(desired_packages)
    while queue:
        name = queue.popleft()
        target = name if name in packages else aliases.get(name, [None])[0]
        if not target:
            print(f"Error: Package '{name}' was not found in the available packages.")
            sys.exit(1)
        if target in resolved:
            continue
        resolved.append(target)
        depends = packages.get(target, {}).get("Depends", "")
        if not depends:
            continue
        for clause in depends.split(', '):
            if not clause:
                continue
            dep = clause.split(' ')[0]  # strip version constraint
            if dep in packages and dep not in resolved and dep not in queue:
                queue.append(dep)
    return resolved
def parse_package_index(content):
    """Parse concatenated Packages-index text into package and alias maps.

    Returns (packages, aliases):
      packages: name -> {"Version", "Filename", "Depends"} keeping only the
                highest version seen for each name (Debian version ordering).
      aliases:  virtual name (from Provides, version specifier stripped) ->
                list of providing package names.
    Note: only single-line "Field: value" pairs are recognized; continuation
    lines of folded fields are not parsed.
    """
    packages = {}
    aliases = {}
    for entry in re.split(r'\n\n+', content):
        fields = dict(re.findall(r'^(\S+): (.+)$', entry, re.MULTILINE))
        package_name = fields.get("Package")
        if package_name is None:
            continue
        version = fields.get("Version")
        # First sighting wins immediately; later entries replace it only when
        # they carry a strictly higher version.
        if package_name not in packages or \
                compare_debian_versions(version, packages[package_name]["Version"]) > 0:
            packages[package_name] = {
                "Version": version,
                "Filename": fields.get("Filename"),
                "Depends": fields.get("Depends"),
            }
        provides = fields.get("Provides", None)
        if provides:
            for raw_alias in provides.split(","):
                # Drop any "(= version)" specifier from the virtual name.
                alias_name = re.sub(r'\s*\(=.*\)', '', raw_alias.strip())
                providers = aliases.setdefault(alias_name, [])
                if package_name not in providers:
                    providers.append(package_name)
    return packages, aliases
def install_packages(mirror, packages_info, aliases, tmp_dir, extract_dir, ar_tool, desired_packages):
    """Resolve, download, and extract the desired packages into extract_dir.

    Pipeline: resolve the transitive dependency set, download every matching
    .deb (including alias providers) into tmp_dir in parallel, then extract
    the archives in reverse resolution order so that dependencies are laid
    down before their dependents.
    """
    resolved_packages = resolve_dependencies(packages_info, aliases, desired_packages)
    print(f"Resolved packages (including dependencies): {resolved_packages}")

    # Collect index entries to fetch: the package itself plus any providers
    # registered under its name.
    to_download = {}
    for pkg in resolved_packages:
        if pkg in packages_info:
            to_download[pkg] = packages_info[pkg]
        for alias in aliases.get(pkg, []):
            if alias in packages_info:
                to_download[alias] = packages_info[alias]

    asyncio.run(download_deb_files_parallel(mirror, to_download, tmp_dir))

    # Map each resolved package to the local path its .deb was saved under.
    deb_path_by_package = {}
    for pkg in resolved_packages:
        info = packages_info.get(pkg)
        if info and info.get("Filename"):
            deb_path_by_package[pkg] = os.path.join(tmp_dir, os.path.basename(info["Filename"]))

    for pkg in reversed(resolved_packages):
        deb_file = deb_path_by_package.get(pkg)
        if deb_file and os.path.exists(deb_file):
            extract_deb_file(deb_file, tmp_dir, extract_dir, ar_tool)

    print("All done!")
def extract_deb_file(deb_file, tmp_dir, extract_dir, ar_tool):
    """Extract a .deb archive's data.tar.* payload into extract_dir.

    Uses `ar_tool` to list the archive and stream out the data member, then
    unpacks the tarball (xz/gz natively; zst via the zstandard module).
    Raises FileNotFoundError when no data.tar.* member exists and ValueError
    for an unrecognized compression suffix.
    """
    os.makedirs(extract_dir, exist_ok=True)
    deb_path = os.path.abspath(deb_file)
    with tempfile.TemporaryDirectory(dir=tmp_dir) as tmp_subdir:
        # List members with an argv list (no shell): paths containing spaces
        # or shell metacharacters can neither break nor inject into the command.
        result = subprocess.run([ar_tool, "t", deb_path], cwd=tmp_subdir, check=True,
                                stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        tar_filename = None
        for line in result.stdout.decode().splitlines():
            if line.startswith("data.tar"):
                tar_filename = line.strip()
                break
        if not tar_filename:
            raise FileNotFoundError(f"Could not find 'data.tar.*' in {deb_file}.")
        tar_file_path = os.path.join(tmp_subdir, tar_filename)
        print(f"Extracting {tar_filename} from {deb_file}..")
        # Replace the former shell redirection ("ar p ... > file") with an
        # explicit stdout handle — same effect, no shell involved.
        with open(tar_file_path, "wb") as tar_out:
            subprocess.run([ar_tool, "p", deb_path, tar_filename], check=True, stdout=tar_out)
        file_extension = os.path.splitext(tar_file_path)[1].lower()
        if file_extension == ".xz":
            mode = "r:xz"
        elif file_extension == ".gz":
            mode = "r:gz"
        elif file_extension == ".zst":
            # tarfile has no zstd support here; decompress to a plain tar first.
            # Strip only the trailing ".zst" (str.replace could mangle a path
            # containing ".zst" elsewhere).
            decompressed_tar_path = tar_file_path[:-len(".zst")]
            with open(tar_file_path, "rb") as zst_file, open(decompressed_tar_path, "wb") as decompressed_file:
                dctx = zstandard.ZstdDecompressor()
                dctx.copy_stream(zst_file, decompressed_file)
            tar_file_path = decompressed_tar_path
            mode = "r"
        else:
            raise ValueError(f"Unsupported compression format: {file_extension}")
        with tarfile.open(tar_file_path, mode) as tar:
            tar.extractall(path=extract_dir, filter='fully_trusted')
def finalize_setup(rootfsdir):
    """Normalize the rootfs to a merged-usr layout: lib -> usr/lib symlink.

    If lib is a real directory its contents are copied into usr/lib and the
    directory removed; an existing symlink is simply replaced. Finally lib is
    recreated as a symlink pointing at usr/lib.
    """
    lib_dir = os.path.join(rootfsdir, 'lib')
    usr_lib_dir = os.path.join(rootfsdir, 'usr', 'lib')

    if os.path.exists(lib_dir):
        if os.path.islink(lib_dir):
            # Stale symlink: drop it so it can be recreated below.
            os.remove(lib_dir)
        else:
            # Real directory: merge every entry into usr/lib, then delete it.
            os.makedirs(usr_lib_dir, exist_ok=True)
            for entry in os.listdir(lib_dir):
                source = os.path.join(lib_dir, entry)
                target = os.path.join(usr_lib_dir, entry)
                if os.path.isdir(source):
                    shutil.copytree(source, target, dirs_exist_ok=True)
                else:
                    shutil.copy2(source, target)
            shutil.rmtree(lib_dir)

    os.symlink(usr_lib_dir, lib_dir)
if __name__ == "__main__":
    # CLI entry point: build a minimal Debian/Ubuntu rootfs by downloading and
    # unpacking .deb packages directly (no debootstrap / QEMU required).
    parser = argparse.ArgumentParser(description="Generate rootfs for .NET runtime on Debian-like OS")
    parser.add_argument("--distro", required=False, help="Distro name (e.g., debian, ubuntu, etc.)")
    parser.add_argument("--arch", required=True, help="Architecture (e.g., amd64, loong64, etc.)")
    parser.add_argument("--rootfsdir", required=True, help="Destination directory.")
    parser.add_argument('--suite', required=True, action='append', help='Specify one or more repository suites to collect index data.')
    parser.add_argument("--mirror", required=False, help="Mirror (e.g., http://ftp.debian.org/debian-ports etc.)")
    parser.add_argument("--artool", required=False, default="ar", help="ar tool to extract debs (e.g., ar, llvm-ar etc.)")
    parser.add_argument("packages", nargs="+", help="List of package names to be installed.")
    args = parser.parse_args()

    # Pick a default mirror from the distro when none was given explicitly;
    # Ubuntu splits its archive by architecture (main archive vs. ports).
    if args.mirror is None:
        if args.distro == "ubuntu":
            args.mirror = "http://archive.ubuntu.com/ubuntu" if args.arch in ["amd64", "i386"] else "http://ports.ubuntu.com/ubuntu-ports"
        elif args.distro == "debian":
            args.mirror = "http://ftp.debian.org/debian-ports"
        else:
            raise Exception("Unsupported distro")

    # Always install a minimal base set on top of whatever the caller asked for.
    DESIRED_PACKAGES = args.packages + [ # base packages
        "dpkg",
        "busybox",
        "libc-bin",
        "base-files",
        "base-passwd",
        "debianutils"
    ]

    print(f"Creating rootfs. rootfsdir: {args.rootfsdir}, distro: {args.distro}, arch: {args.arch}, suites: {args.suite}, mirror: {args.mirror}")

    # Fetch and merge the Packages indexes for all requested suites, then
    # resolve + download + extract into the rootfs via a scratch directory.
    package_index_content = asyncio.run(download_package_index_parallel(args.mirror, args.arch, args.suite))

    packages_info, aliases = parse_package_index(package_index_content)

    with tempfile.TemporaryDirectory() as tmp_dir:
        install_packages(args.mirror, packages_info, aliases, tmp_dir, args.rootfsdir, args.artool, DESIRED_PACKAGES)
        finalize_setup(args.rootfsdir)

0
eng/common/cross/tizen-build-rootfs.sh

9
eng/common/cross/tizen-fetch.sh

@ -156,13 +156,8 @@ fetch_tizen_pkgs()
done done
} }
if [ "$TIZEN_ARCH" == "riscv64" ]; then BASE="Tizen-Base"
BASE="Tizen-Base-RISCV" UNIFIED="Tizen-Unified"
UNIFIED="Tizen-Unified-RISCV"
else
BASE="Tizen-Base"
UNIFIED="Tizen-Unified"
fi
Inform "Initialize ${TIZEN_ARCH} base" Inform "Initialize ${TIZEN_ARCH} base"
fetch_tizen_pkgs_init standard $BASE fetch_tizen_pkgs_init standard $BASE

82
eng/common/cross/toolchain.cmake

@ -67,6 +67,13 @@ elseif(TARGET_ARCH_NAME STREQUAL "armv6")
else() else()
set(TOOLCHAIN "arm-linux-gnueabihf") set(TOOLCHAIN "arm-linux-gnueabihf")
endif() endif()
elseif(TARGET_ARCH_NAME STREQUAL "loongarch64")
set(CMAKE_SYSTEM_PROCESSOR "loongarch64")
if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/loongarch64-alpine-linux-musl)
set(TOOLCHAIN "loongarch64-alpine-linux-musl")
else()
set(TOOLCHAIN "loongarch64-linux-gnu")
endif()
elseif(TARGET_ARCH_NAME STREQUAL "ppc64le") elseif(TARGET_ARCH_NAME STREQUAL "ppc64le")
set(CMAKE_SYSTEM_PROCESSOR ppc64le) set(CMAKE_SYSTEM_PROCESSOR ppc64le)
if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/powerpc64le-alpine-linux-musl) if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/powerpc64le-alpine-linux-musl)
@ -118,7 +125,7 @@ elseif(TARGET_ARCH_NAME STREQUAL "x86")
set(TIZEN_TOOLCHAIN "i586-tizen-linux-gnu") set(TIZEN_TOOLCHAIN "i586-tizen-linux-gnu")
endif() endif()
else() else()
message(FATAL_ERROR "Arch is ${TARGET_ARCH_NAME}. Only arm, arm64, armel, armv6, ppc64le, riscv64, s390x, x64 and x86 are supported!") message(FATAL_ERROR "Arch is ${TARGET_ARCH_NAME}. Only arm, arm64, armel, armv6, loongarch64, ppc64le, riscv64, s390x, x64 and x86 are supported!")
endif() endif()
if(DEFINED ENV{TOOLCHAIN}) if(DEFINED ENV{TOOLCHAIN})
@ -148,6 +155,25 @@ if(TIZEN)
include_directories(SYSTEM ${TIZEN_TOOLCHAIN_PATH}/include/c++/${TIZEN_TOOLCHAIN}) include_directories(SYSTEM ${TIZEN_TOOLCHAIN_PATH}/include/c++/${TIZEN_TOOLCHAIN})
endif() endif()
function(locate_toolchain_exec exec var)
set(TOOLSET_PREFIX ${TOOLCHAIN}-)
string(TOUPPER ${exec} EXEC_UPPERCASE)
if(NOT "$ENV{CLR_${EXEC_UPPERCASE}}" STREQUAL "")
set(${var} "$ENV{CLR_${EXEC_UPPERCASE}}" PARENT_SCOPE)
return()
endif()
find_program(EXEC_LOCATION_${exec}
NAMES
"${TOOLSET_PREFIX}${exec}${CLR_CMAKE_COMPILER_FILE_NAME_VERSION}"
"${TOOLSET_PREFIX}${exec}")
if (EXEC_LOCATION_${exec} STREQUAL "EXEC_LOCATION_${exec}-NOTFOUND")
message(FATAL_ERROR "Unable to find toolchain executable. Name: ${exec}, Prefix: ${TOOLSET_PREFIX}.")
endif()
set(${var} ${EXEC_LOCATION_${exec}} PARENT_SCOPE)
endfunction()
if(ANDROID) if(ANDROID)
if(TARGET_ARCH_NAME STREQUAL "arm") if(TARGET_ARCH_NAME STREQUAL "arm")
set(ANDROID_ABI armeabi-v7a) set(ANDROID_ABI armeabi-v7a)
@ -178,66 +204,24 @@ elseif(FREEBSD)
set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} -fuse-ld=lld") set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} -fuse-ld=lld")
elseif(ILLUMOS) elseif(ILLUMOS)
set(CMAKE_SYSROOT "${CROSS_ROOTFS}") set(CMAKE_SYSROOT "${CROSS_ROOTFS}")
set(CMAKE_SYSTEM_PREFIX_PATH "${CROSS_ROOTFS}")
set(CMAKE_C_STANDARD_LIBRARIES "${CMAKE_C_STANDARD_LIBRARIES} -lssp")
set(CMAKE_CXX_STANDARD_LIBRARIES "${CMAKE_CXX_STANDARD_LIBRARIES} -lssp")
include_directories(SYSTEM ${CROSS_ROOTFS}/include) include_directories(SYSTEM ${CROSS_ROOTFS}/include)
set(TOOLSET_PREFIX ${TOOLCHAIN}-)
function(locate_toolchain_exec exec var)
string(TOUPPER ${exec} EXEC_UPPERCASE)
if(NOT "$ENV{CLR_${EXEC_UPPERCASE}}" STREQUAL "")
set(${var} "$ENV{CLR_${EXEC_UPPERCASE}}" PARENT_SCOPE)
return()
endif()
find_program(EXEC_LOCATION_${exec}
NAMES
"${TOOLSET_PREFIX}${exec}${CLR_CMAKE_COMPILER_FILE_NAME_VERSION}"
"${TOOLSET_PREFIX}${exec}")
if (EXEC_LOCATION_${exec} STREQUAL "EXEC_LOCATION_${exec}-NOTFOUND")
message(FATAL_ERROR "Unable to find toolchain executable. Name: ${exec}, Prefix: ${TOOLSET_PREFIX}.")
endif()
set(${var} ${EXEC_LOCATION_${exec}} PARENT_SCOPE)
endfunction()
set(CMAKE_SYSTEM_PREFIX_PATH "${CROSS_ROOTFS}")
locate_toolchain_exec(gcc CMAKE_C_COMPILER) locate_toolchain_exec(gcc CMAKE_C_COMPILER)
locate_toolchain_exec(g++ CMAKE_CXX_COMPILER) locate_toolchain_exec(g++ CMAKE_CXX_COMPILER)
set(CMAKE_C_STANDARD_LIBRARIES "${CMAKE_C_STANDARD_LIBRARIES} -lssp")
set(CMAKE_CXX_STANDARD_LIBRARIES "${CMAKE_CXX_STANDARD_LIBRARIES} -lssp")
elseif(HAIKU) elseif(HAIKU)
set(CMAKE_SYSROOT "${CROSS_ROOTFS}") set(CMAKE_SYSROOT "${CROSS_ROOTFS}")
set(CMAKE_PROGRAM_PATH "${CMAKE_PROGRAM_PATH};${CROSS_ROOTFS}/cross-tools-x86_64/bin") set(CMAKE_PROGRAM_PATH "${CMAKE_PROGRAM_PATH};${CROSS_ROOTFS}/cross-tools-x86_64/bin")
set(TOOLSET_PREFIX ${TOOLCHAIN}-)
function(locate_toolchain_exec exec var)
string(TOUPPER ${exec} EXEC_UPPERCASE)
if(NOT "$ENV{CLR_${EXEC_UPPERCASE}}" STREQUAL "")
set(${var} "$ENV{CLR_${EXEC_UPPERCASE}}" PARENT_SCOPE)
return()
endif()
find_program(EXEC_LOCATION_${exec}
NAMES
"${TOOLSET_PREFIX}${exec}${CLR_CMAKE_COMPILER_FILE_NAME_VERSION}"
"${TOOLSET_PREFIX}${exec}")
if (EXEC_LOCATION_${exec} STREQUAL "EXEC_LOCATION_${exec}-NOTFOUND")
message(FATAL_ERROR "Unable to find toolchain executable. Name: ${exec}, Prefix: ${TOOLSET_PREFIX}.")
endif()
set(${var} ${EXEC_LOCATION_${exec}} PARENT_SCOPE)
endfunction()
set(CMAKE_SYSTEM_PREFIX_PATH "${CROSS_ROOTFS}") set(CMAKE_SYSTEM_PREFIX_PATH "${CROSS_ROOTFS}")
set(CMAKE_C_STANDARD_LIBRARIES "${CMAKE_C_STANDARD_LIBRARIES} -lssp")
set(CMAKE_CXX_STANDARD_LIBRARIES "${CMAKE_CXX_STANDARD_LIBRARIES} -lssp")
locate_toolchain_exec(gcc CMAKE_C_COMPILER) locate_toolchain_exec(gcc CMAKE_C_COMPILER)
locate_toolchain_exec(g++ CMAKE_CXX_COMPILER) locate_toolchain_exec(g++ CMAKE_CXX_COMPILER)
set(CMAKE_C_STANDARD_LIBRARIES "${CMAKE_C_STANDARD_LIBRARIES} -lssp")
set(CMAKE_CXX_STANDARD_LIBRARIES "${CMAKE_CXX_STANDARD_LIBRARIES} -lssp")
# let CMake set up the correct search paths # let CMake set up the correct search paths
include(Platform/Haiku) include(Platform/Haiku)
else() else()
@ -307,7 +291,7 @@ endif()
# Specify compile options # Specify compile options
if((TARGET_ARCH_NAME MATCHES "^(arm|arm64|armel|armv6|ppc64le|riscv64|s390x|x64|x86)$" AND NOT ANDROID AND NOT FREEBSD) OR ILLUMOS OR HAIKU) if((TARGET_ARCH_NAME MATCHES "^(arm|arm64|armel|armv6|loongarch64|ppc64le|riscv64|s390x|x64|x86)$" AND NOT ANDROID AND NOT FREEBSD) OR ILLUMOS OR HAIKU)
set(CMAKE_C_COMPILER_TARGET ${TOOLCHAIN}) set(CMAKE_C_COMPILER_TARGET ${TOOLCHAIN})
set(CMAKE_CXX_COMPILER_TARGET ${TOOLCHAIN}) set(CMAKE_CXX_COMPILER_TARGET ${TOOLCHAIN})
set(CMAKE_ASM_COMPILER_TARGET ${TOOLCHAIN}) set(CMAKE_ASM_COMPILER_TARGET ${TOOLCHAIN})

0
eng/common/cross/x86/tizen-build-rootfs.sh

0
eng/common/cross/x86/tizen-fetch.sh

2
eng/common/darc-init.sh

@ -68,7 +68,7 @@ function InstallDarcCli {
fi fi
fi fi
local arcadeServicesSource="https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json" local arcadeServicesSource="https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json"
echo "Installing Darc CLI version $darcVersion..." echo "Installing Darc CLI version $darcVersion..."
echo "You may need to restart your command shell if this is the first dotnet tool you have installed." echo "You may need to restart your command shell if this is the first dotnet tool you have installed."

7
eng/common/dotnet.cmd

@ -0,0 +1,7 @@
@echo off
:: This script is used to install the .NET SDK.
:: It will also invoke the SDK with any provided arguments.
powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0dotnet.ps1""" %*"
exit /b %ErrorLevel%

11
eng/common/dotnet.ps1

@ -0,0 +1,11 @@
# This script is used to install the .NET SDK.
# It will also invoke the SDK with any provided arguments.
. $PSScriptRoot\tools.ps1
$dotnetRoot = InitializeDotNetCli -install:$true
# Invoke acquired SDK with args if they are provided
if ($args.count -gt 0) {
$env:DOTNET_NOLOGO=1
& "$dotnetRoot\dotnet.exe" $args
}

26
eng/common/dotnet.sh

@ -0,0 +1,26 @@
#!/usr/bin/env bash
# This script is used to install the .NET SDK.
# It will also invoke the SDK with any provided arguments.
source="${BASH_SOURCE[0]}"
# resolve $SOURCE until the file is no longer a symlink
while [[ -h $source ]]; do
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
source="$(readlink "$source")"
# if $source was a relative symlink, we need to resolve it relative to the path where the
# symlink file was located
[[ $source != /* ]] && source="$scriptroot/$source"
done
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
source $scriptroot/tools.sh
InitializeDotNetCli true # install
# Invoke acquired SDK with args if they are provided
if [[ $# > 0 ]]; then
__dotnetDir=${_InitializeDotNetCli}
dotnetPath=${__dotnetDir}/dotnet
${dotnetPath} "$@"
fi

49
eng/common/generate-locproject.ps1

@ -33,15 +33,27 @@ $jsonTemplateFiles | ForEach-Object {
$jsonWinformsTemplateFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "en\\strings\.json" } # current winforms pattern $jsonWinformsTemplateFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "en\\strings\.json" } # current winforms pattern
$wxlFilesV3 = @()
$wxlFilesV5 = @()
$wxlFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "\\.+\.wxl" -And -Not( $_.Directory.Name -Match "\d{4}" ) } # localized files live in four digit lang ID directories; this excludes them $wxlFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "\\.+\.wxl" -And -Not( $_.Directory.Name -Match "\d{4}" ) } # localized files live in four digit lang ID directories; this excludes them
if (-not $wxlFiles) { if (-not $wxlFiles) {
$wxlEnFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "\\1033\\.+\.wxl" } # pick up en files (1033 = en) specifically so we can copy them to use as the neutral xlf files $wxlEnFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "\\1033\\.+\.wxl" } # pick up en files (1033 = en) specifically so we can copy them to use as the neutral xlf files
if ($wxlEnFiles) { if ($wxlEnFiles) {
$wxlFiles = @() $wxlFiles = @()
$wxlEnFiles | ForEach-Object { $wxlEnFiles | ForEach-Object {
$destinationFile = "$($_.Directory.Parent.FullName)\$($_.Name)" $destinationFile = "$($_.Directory.Parent.FullName)\$($_.Name)"
$wxlFiles += Copy-Item "$($_.FullName)" -Destination $destinationFile -PassThru $content = Get-Content $_.FullName -Raw
}
# Split files on schema to select different parser settings in the generated project.
if ($content -like "*http://wixtoolset.org/schemas/v4/wxl*")
{
$wxlFilesV5 += Copy-Item $_.FullName -Destination $destinationFile -PassThru
}
elseif ($content -like "*http://schemas.microsoft.com/wix/2006/localization*")
{
$wxlFilesV3 += Copy-Item $_.FullName -Destination $destinationFile -PassThru
}
}
} }
} }
@ -114,7 +126,32 @@ $locJson = @{
CloneLanguageSet = "WiX_CloneLanguages" CloneLanguageSet = "WiX_CloneLanguages"
LssFiles = @( "wxl_loc.lss" ) LssFiles = @( "wxl_loc.lss" )
LocItems = @( LocItems = @(
$wxlFiles | ForEach-Object { $wxlFilesV3 | ForEach-Object {
$outputPath = "$($_.Directory.FullName | Resolve-Path -Relative)\"
$continue = $true
foreach ($exclusion in $exclusions.Exclusions) {
if ($_.FullName.Contains($exclusion)) {
$continue = $false
}
}
$sourceFile = ($_.FullName | Resolve-Path -Relative)
if ($continue)
{
return @{
SourceFile = $sourceFile
CopyOption = "LangIDOnPath"
OutputPath = $outputPath
}
}
}
)
},
@{
LanguageSet = $LanguageSet
CloneLanguageSet = "WiX_CloneLanguages"
LssFiles = @( "P210WxlSchemaV4.lss" )
LocItems = @(
$wxlFilesV5 | ForEach-Object {
$outputPath = "$($_.Directory.FullName | Resolve-Path -Relative)\" $outputPath = "$($_.Directory.FullName | Resolve-Path -Relative)\"
$continue = $true $continue = $true
foreach ($exclusion in $exclusions.Exclusions) { foreach ($exclusion in $exclusions.Exclusions) {

20
eng/common/generate-sbom-prep.ps1

@ -4,18 +4,26 @@ Param(
. $PSScriptRoot\pipeline-logging-functions.ps1 . $PSScriptRoot\pipeline-logging-functions.ps1
# Normally - we'd listen to the manifest path given, but 1ES templates will overwrite if this level gets uploaded directly
# with their own overwriting ours. So we create it as a sub directory of the requested manifest path.
$ArtifactName = "${env:SYSTEM_STAGENAME}_${env:AGENT_JOBNAME}_SBOM"
$SafeArtifactName = $ArtifactName -replace '["/:<>\\|?@*"() ]', '_'
$SbomGenerationDir = Join-Path $ManifestDirPath $SafeArtifactName
Write-Host "Artifact name before : $ArtifactName"
Write-Host "Artifact name after : $SafeArtifactName"
Write-Host "Creating dir $ManifestDirPath" Write-Host "Creating dir $ManifestDirPath"
# create directory for sbom manifest to be placed # create directory for sbom manifest to be placed
if (!(Test-Path -path $ManifestDirPath)) if (!(Test-Path -path $SbomGenerationDir))
{ {
New-Item -ItemType Directory -path $ManifestDirPath New-Item -ItemType Directory -path $SbomGenerationDir
Write-Host "Successfully created directory $ManifestDirPath" Write-Host "Successfully created directory $SbomGenerationDir"
} }
else{ else{
Write-PipelineTelemetryError -category 'Build' "Unable to create sbom folder." Write-PipelineTelemetryError -category 'Build' "Unable to create sbom folder."
} }
Write-Host "Updating artifact name" Write-Host "Updating artifact name"
$artifact_name = "${env:SYSTEM_STAGENAME}_${env:AGENT_JOBNAME}_SBOM" -replace '["/:<>\\|?@*"() ]', '_' Write-Host "##vso[task.setvariable variable=ARTIFACT_NAME]$SafeArtifactName"
Write-Host "Artifact name $artifact_name"
Write-Host "##vso[task.setvariable variable=ARTIFACT_NAME]$artifact_name"

17
eng/common/generate-sbom-prep.sh

@ -14,19 +14,24 @@ done
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
. $scriptroot/pipeline-logging-functions.sh . $scriptroot/pipeline-logging-functions.sh
# replace all special characters with _, some builds use special characters like : in Agent.Jobname, that is not a permissible name while uploading artifacts.
artifact_name=$SYSTEM_STAGENAME"_"$AGENT_JOBNAME"_SBOM"
safe_artifact_name="${artifact_name//["/:<>\\|?@*$" ]/_}"
manifest_dir=$1 manifest_dir=$1
if [ ! -d "$manifest_dir" ] ; then # Normally - we'd listen to the manifest path given, but 1ES templates will overwrite if this level gets uploaded directly
mkdir -p "$manifest_dir" # with their own overwriting ours. So we create it as a sub directory of the requested manifest path.
echo "Sbom directory created." $manifest_dir sbom_generation_dir="$manifest_dir/$safe_artifact_name"
if [ ! -d "$sbom_generation_dir" ] ; then
mkdir -p "$sbom_generation_dir"
echo "Sbom directory created." $sbom_generation_dir
else else
Write-PipelineTelemetryError -category 'Build' "Unable to create sbom folder." Write-PipelineTelemetryError -category 'Build' "Unable to create sbom folder."
fi fi
artifact_name=$SYSTEM_STAGENAME"_"$AGENT_JOBNAME"_SBOM"
echo "Artifact name before : "$artifact_name echo "Artifact name before : "$artifact_name
# replace all special characters with _, some builds use special characters like : in Agent.Jobname, that is not a permissible name while uploading artifacts.
safe_artifact_name="${artifact_name//["/:<>\\|?@*$" ]/_}"
echo "Artifact name after : "$safe_artifact_name echo "Artifact name after : "$safe_artifact_name
export ARTIFACT_NAME=$safe_artifact_name export ARTIFACT_NAME=$safe_artifact_name
echo "##vso[task.setvariable variable=ARTIFACT_NAME]$safe_artifact_name" echo "##vso[task.setvariable variable=ARTIFACT_NAME]$safe_artifact_name"

3
eng/common/internal/NuGet.config

@ -4,4 +4,7 @@
<clear /> <clear />
<add key="dotnet-core-internal-tooling" value="https://pkgs.dev.azure.com/devdiv/_packaging/dotnet-core-internal-tooling/nuget/v3/index.json" /> <add key="dotnet-core-internal-tooling" value="https://pkgs.dev.azure.com/devdiv/_packaging/dotnet-core-internal-tooling/nuget/v3/index.json" />
</packageSources> </packageSources>
<packageSourceMapping>
<clear />
</packageSourceMapping>
</configuration> </configuration>

10
eng/common/internal/Tools.csproj

@ -15,16 +15,6 @@
<PackageReference Include="Microsoft.DotNet.IBCMerge" Version="$(MicrosoftDotNetIBCMergeVersion)" Condition="'$(UsingToolIbcOptimization)' == 'true'" /> <PackageReference Include="Microsoft.DotNet.IBCMerge" Version="$(MicrosoftDotNetIBCMergeVersion)" Condition="'$(UsingToolIbcOptimization)' == 'true'" />
<PackageReference Include="Drop.App" Version="$(DropAppVersion)" ExcludeAssets="all" Condition="'$(UsingToolVisualStudioIbcTraining)' == 'true'"/> <PackageReference Include="Drop.App" Version="$(DropAppVersion)" ExcludeAssets="all" Condition="'$(UsingToolVisualStudioIbcTraining)' == 'true'"/>
</ItemGroup> </ItemGroup>
<PropertyGroup>
<RestoreSources></RestoreSources>
<RestoreSources Condition="'$(UsingToolIbcOptimization)' == 'true'">
https://devdiv.pkgs.visualstudio.com/_packaging/dotnet-core-internal-tooling/nuget/v3/index.json;
</RestoreSources>
<RestoreSources Condition="'$(UsingToolVisualStudioIbcTraining)' == 'true'">
$(RestoreSources);
https://devdiv.pkgs.visualstudio.com/_packaging/VS/nuget/v3/index.json;
</RestoreSources>
</PropertyGroup>
<!-- Repository extensibility point --> <!-- Repository extensibility point -->
<Import Project="$(RepositoryEngineeringDir)InternalTools.props" Condition="Exists('$(RepositoryEngineeringDir)InternalTools.props')" /> <Import Project="$(RepositoryEngineeringDir)InternalTools.props" Condition="Exists('$(RepositoryEngineeringDir)InternalTools.props')" />

0
eng/common/native/init-compiler.sh

0
eng/common/native/init-distro-rid.sh

0
eng/common/native/init-os-and-arch.sh

62
eng/common/native/install-dependencies.sh

@ -0,0 +1,62 @@
#!/bin/sh
set -e
# This is a simple script primarily used for CI to install necessary dependencies
#
# Usage:
#
# ./install-dependencies.sh <OS>
os="$(echo "$1" | tr "[:upper:]" "[:lower:]")"
if [ -z "$os" ]; then
. "$(dirname "$0")"/init-os-and-arch.sh
fi
case "$os" in
linux)
if [ -e /etc/os-release ]; then
. /etc/os-release
fi
if [ "$ID" = "debian" ] || [ "$ID_LIKE" = "debian" ]; then
apt update
apt install -y build-essential gettext locales cmake llvm clang lld lldb liblldb-dev libunwind8-dev libicu-dev liblttng-ust-dev \
libssl-dev libkrb5-dev pigz cpio
localedef -i en_US -c -f UTF-8 -A /usr/share/locale/locale.alias en_US.UTF-8
elif [ "$ID" = "fedora" ] || [ "$ID" = "rhel" ] || [ "$ID" = "azurelinux" ]; then
pkg_mgr="$(command -v tdnf 2>/dev/null || command -v dnf)"
$pkg_mgr install -y cmake llvm lld lldb clang python curl libicu-devel openssl-devel krb5-devel lttng-ust-devel pigz cpio
elif [ "$ID" = "alpine" ]; then
apk add build-base cmake bash curl clang llvm-dev lld lldb krb5-dev lttng-ust-dev icu-dev openssl-dev pigz cpio
else
echo "Unsupported distro. distro: $ID"
exit 1
fi
;;
osx|maccatalyst|ios|iossimulator|tvos|tvossimulator)
echo "Installed xcode version: $(xcode-select -p)"
export HOMEBREW_NO_INSTALL_CLEANUP=1
export HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK=1
# Skip brew update for now, see https://github.com/actions/setup-python/issues/577
# brew update --preinstall
brew bundle --no-upgrade --file=- <<EOF
brew "cmake"
brew "icu4c"
brew "openssl@3"
brew "pkgconf"
brew "python3"
brew "pigz"
EOF
;;
*)
echo "Unsupported platform. OS: $os"
exit 1
;;
esac

2
eng/common/post-build/nuget-verification.ps1

@ -30,7 +30,7 @@
[CmdletBinding(PositionalBinding = $false)] [CmdletBinding(PositionalBinding = $false)]
param( param(
[string]$NuGetExePath, [string]$NuGetExePath,
[string]$PackageSource = "https://api.nuget.org/v3/index.json", [string]$PackageSource = "https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/index.json",
[string]$DownloadPath, [string]$DownloadPath,
[Parameter(ValueFromRemainingArguments = $true)] [Parameter(ValueFromRemainingArguments = $true)]
[string[]]$args [string[]]$args

12
eng/common/post-build/publish-using-darc.ps1

@ -5,7 +5,9 @@ param(
[Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro.dot.net', [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro.dot.net',
[Parameter(Mandatory=$true)][string] $WaitPublishingFinish, [Parameter(Mandatory=$true)][string] $WaitPublishingFinish,
[Parameter(Mandatory=$false)][string] $ArtifactsPublishingAdditionalParameters, [Parameter(Mandatory=$false)][string] $ArtifactsPublishingAdditionalParameters,
[Parameter(Mandatory=$false)][string] $SymbolPublishingAdditionalParameters [Parameter(Mandatory=$false)][string] $SymbolPublishingAdditionalParameters,
[Parameter(Mandatory=$false)][string] $RequireDefaultChannels,
[Parameter(Mandatory=$false)][string] $SkipAssetsPublishing
) )
try { try {
@ -34,6 +36,14 @@ try {
$optionalParams.Add("--no-wait") | Out-Null $optionalParams.Add("--no-wait") | Out-Null
} }
if ("true" -eq $RequireDefaultChannels) {
$optionalParams.Add("--default-channels-required") | Out-Null
}
if ("true" -eq $SkipAssetsPublishing) {
$optionalParams.Add("--skip-assets-publishing") | Out-Null
}
& $darc add-build-to-channel ` & $darc add-build-to-channel `
--id $buildId ` --id $buildId `
--publishing-infra-version $PublishingInfraVersion ` --publishing-infra-version $PublishingInfraVersion `

12
eng/common/sdk-task.ps1

@ -6,13 +6,15 @@ Param(
[string] $msbuildEngine = $null, [string] $msbuildEngine = $null,
[switch] $restore, [switch] $restore,
[switch] $prepareMachine, [switch] $prepareMachine,
[switch][Alias('nobl')]$excludeCIBinaryLog,
[switch]$noWarnAsError,
[switch] $help, [switch] $help,
[Parameter(ValueFromRemainingArguments=$true)][String[]]$properties [Parameter(ValueFromRemainingArguments=$true)][String[]]$properties
) )
$ci = $true $ci = $true
$binaryLog = $true $binaryLog = if ($excludeCIBinaryLog) { $false } else { $true }
$warnAsError = $true $warnAsError = if ($noWarnAsError) { $false } else { $true }
. $PSScriptRoot\tools.ps1 . $PSScriptRoot\tools.ps1
@ -27,6 +29,7 @@ function Print-Usage() {
Write-Host "Advanced settings:" Write-Host "Advanced settings:"
Write-Host " -prepareMachine Prepare machine for CI run" Write-Host " -prepareMachine Prepare machine for CI run"
Write-Host " -msbuildEngine <value> Msbuild engine to use to run build ('dotnet', 'vs', or unspecified)." Write-Host " -msbuildEngine <value> Msbuild engine to use to run build ('dotnet', 'vs', or unspecified)."
Write-Host " -excludeCIBinaryLog When running on CI, allow no binary log (short: -nobl)"
Write-Host "" Write-Host ""
Write-Host "Command line arguments not listed above are passed thru to msbuild." Write-Host "Command line arguments not listed above are passed thru to msbuild."
} }
@ -34,10 +37,11 @@ function Print-Usage() {
function Build([string]$target) { function Build([string]$target) {
$logSuffix = if ($target -eq 'Execute') { '' } else { ".$target" } $logSuffix = if ($target -eq 'Execute') { '' } else { ".$target" }
$log = Join-Path $LogDir "$task$logSuffix.binlog" $log = Join-Path $LogDir "$task$logSuffix.binlog"
$binaryLogArg = if ($binaryLog) { "/bl:$log" } else { "" }
$outputPath = Join-Path $ToolsetDir "$task\" $outputPath = Join-Path $ToolsetDir "$task\"
MSBuild $taskProject ` MSBuild $taskProject `
/bl:$log ` $binaryLogArg `
/t:$target ` /t:$target `
/p:Configuration=$configuration ` /p:Configuration=$configuration `
/p:RepoRoot=$RepoRoot ` /p:RepoRoot=$RepoRoot `
@ -64,7 +68,7 @@ try {
$GlobalJson.tools | Add-Member -Name "vs" -Value (ConvertFrom-Json "{ `"version`": `"16.5`" }") -MemberType NoteProperty $GlobalJson.tools | Add-Member -Name "vs" -Value (ConvertFrom-Json "{ `"version`": `"16.5`" }") -MemberType NoteProperty
} }
if( -not ($GlobalJson.tools.PSObject.Properties.Name -match "xcopy-msbuild" )) { if( -not ($GlobalJson.tools.PSObject.Properties.Name -match "xcopy-msbuild" )) {
$GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value "17.12.0" -MemberType NoteProperty $GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value "17.13.0" -MemberType NoteProperty
} }
if ($GlobalJson.tools."xcopy-msbuild".Trim() -ine "none") { if ($GlobalJson.tools."xcopy-msbuild".Trim() -ine "none") {
$xcopyMSBuildToolsFolder = InitializeXCopyMSBuild $GlobalJson.tools."xcopy-msbuild" -install $true $xcopyMSBuildToolsFolder = InitializeXCopyMSBuild $GlobalJson.tools."xcopy-msbuild" -install $true

121
eng/common/sdk-task.sh

@ -0,0 +1,121 @@
#!/usr/bin/env bash
show_usage() {
echo "Common settings:"
echo " --task <value> Name of Arcade task (name of a project in SdkTasks directory of the Arcade SDK package)"
echo " --restore Restore dependencies"
echo " --verbosity <value> Msbuild verbosity: q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic]"
echo " --help Print help and exit"
echo ""
echo "Advanced settings:"
echo " --excludeCIBinarylog Don't output binary log (short: -nobl)"
echo " --noWarnAsError Do not warn as error"
echo ""
echo "Command line arguments not listed above are passed thru to msbuild."
}
source="${BASH_SOURCE[0]}"
# resolve $source until the file is no longer a symlink
while [[ -h "$source" ]]; do
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
source="$(readlink "$source")"
# if $source was a relative symlink, we need to resolve it relative to the path where the
# symlink file was located
[[ $source != /* ]] && source="$scriptroot/$source"
done
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
Build() {
local target=$1
local log_suffix=""
[[ "$target" != "Execute" ]] && log_suffix=".$target"
local log="$log_dir/$task$log_suffix.binlog"
local binaryLogArg=""
[[ $binary_log == true ]] && binaryLogArg="/bl:$log"
local output_path="$toolset_dir/$task/"
MSBuild "$taskProject" \
$binaryLogArg \
/t:"$target" \
/p:Configuration="$configuration" \
/p:RepoRoot="$repo_root" \
/p:BaseIntermediateOutputPath="$output_path" \
/v:"$verbosity" \
$properties
}
binary_log=true
configuration="Debug"
verbosity="minimal"
exclude_ci_binary_log=false
restore=false
help=false
properties=''
warnAsError=true
while (($# > 0)); do
lowerI="$(echo $1 | tr "[:upper:]" "[:lower:]")"
case $lowerI in
--task)
task=$2
shift 2
;;
--restore)
restore=true
shift 1
;;
--verbosity)
verbosity=$2
shift 2
;;
--excludecibinarylog|--nobl)
binary_log=false
exclude_ci_binary_log=true
shift 1
;;
--noWarnAsError)
warnAsError=false
shift 1
;;
--help)
help=true
shift 1
;;
*)
properties="$properties $1"
shift 1
;;
esac
done
ci=true
if $help; then
show_usage
exit 0
fi
. "$scriptroot/tools.sh"
InitializeToolset
if [[ -z "$task" ]]; then
Write-PipelineTelemetryError -Category 'Task' -Name 'MissingTask' -Message "Missing required parameter '-task <value>'"
ExitWithExitCode 1
fi
taskProject=$(GetSdkTaskProject "$task")
if [[ ! -e "$taskProject" ]]; then
Write-PipelineTelemetryError -Category 'Task' -Name 'UnknownTask' -Message "Unknown task: $task"
ExitWithExitCode 1
fi
if $restore; then
Build "Restore"
fi
Build "Execute"
ExitWithExitCode 0

2
eng/common/sdl/packages.config

@ -1,4 +1,4 @@
<?xml version="1.0" encoding="utf-8"?> <?xml version="1.0" encoding="utf-8"?>
<packages> <packages>
<package id="Microsoft.Guardian.Cli" version="0.109.0"/> <package id="Microsoft.Guardian.Cli" version="0.199.0"/>
</packages> </packages>

4
eng/common/template-guidance.md

@ -50,14 +50,14 @@ extends:
- task: CopyFiles@2 - task: CopyFiles@2
displayName: Gather build output displayName: Gather build output
inputs: inputs:
SourceFolder: '$(Build.SourcesDirectory)/artifacts/marvel' SourceFolder: '$(System.DefaultWorkingDirectory)/artifacts/marvel'
Contents: '**' Contents: '**'
TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/marvel' TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/marvel'
``` ```
Note: Multiple outputs are ONLY applicable to 1ES PT publishing (only usable when referencing `templates-official`). Note: Multiple outputs are ONLY applicable to 1ES PT publishing (only usable when referencing `templates-official`).
# Development notes ## Development notes
**Folder / file structure** **Folder / file structure**

7
eng/common/templates-official/job/job.yml

@ -3,7 +3,7 @@ parameters:
enableSbom: true enableSbom: true
runAsPublic: false runAsPublic: false
PackageVersion: 9.0.0 PackageVersion: 9.0.0
BuildDropPath: '$(Build.SourcesDirectory)/artifacts' BuildDropPath: '$(System.DefaultWorkingDirectory)/artifacts'
jobs: jobs:
- template: /eng/common/core-templates/job/job.yml - template: /eng/common/core-templates/job/job.yml
@ -16,6 +16,7 @@ jobs:
parameters: parameters:
PackageVersion: ${{ parameters.packageVersion }} PackageVersion: ${{ parameters.packageVersion }}
BuildDropPath: ${{ parameters.buildDropPath }} BuildDropPath: ${{ parameters.buildDropPath }}
ManifestDirPath: $(Build.ArtifactStagingDirectory)/sbom
publishArtifacts: false publishArtifacts: false
# publish artifacts # publish artifacts
@ -30,6 +31,7 @@ jobs:
PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts' PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts'
ArtifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }} ArtifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }}
condition: always() condition: always()
retryCountOnTaskFailure: 10 # for any logs being locked
continueOnError: true continueOnError: true
- ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}: - ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}:
- output: pipelineArtifact - output: pipelineArtifact
@ -38,6 +40,7 @@ jobs:
displayName: 'Publish logs' displayName: 'Publish logs'
continueOnError: true continueOnError: true
condition: always() condition: always()
retryCountOnTaskFailure: 10 # for any logs being locked
sbomEnabled: false # we don't need SBOM for logs sbomEnabled: false # we don't need SBOM for logs
- ${{ if eq(parameters.enablePublishBuildArtifacts, true) }}: - ${{ if eq(parameters.enablePublishBuildArtifacts, true) }}:
@ -45,7 +48,7 @@ jobs:
displayName: Publish Logs displayName: Publish Logs
PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts/log/$(_BuildConfig)' PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts/log/$(_BuildConfig)'
publishLocation: Container publishLocation: Container
ArtifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)' ) }} ArtifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)_Attempt$(System.JobAttempt)' ) }}
continueOnError: true continueOnError: true
condition: always() condition: always()
sbomEnabled: false # we don't need SBOM for logs sbomEnabled: false # we don't need SBOM for logs

7
eng/common/templates-official/steps/publish-build-artifacts.yml

@ -25,6 +25,10 @@ parameters:
type: boolean type: boolean
default: true default: true
- name: retryCountOnTaskFailure
type: string
default: 10
steps: steps:
- ${{ if ne(parameters.is1ESPipeline, true) }}: - ${{ if ne(parameters.is1ESPipeline, true) }}:
- 'eng/common/templates-official cannot be referenced from a non-1ES managed template': error - 'eng/common/templates-official cannot be referenced from a non-1ES managed template': error
@ -38,4 +42,5 @@ steps:
PathtoPublish: ${{ parameters.pathToPublish }} PathtoPublish: ${{ parameters.pathToPublish }}
${{ if parameters.artifactName }}: ${{ if parameters.artifactName }}:
ArtifactName: ${{ parameters.artifactName }} ArtifactName: ${{ parameters.artifactName }}
${{ if parameters.retryCountOnTaskFailure }}:
retryCountOnTaskFailure: ${{ parameters.retryCountOnTaskFailure }}

7
eng/common/templates-official/steps/source-index-stage1-publish.yml

@ -0,0 +1,7 @@
steps:
- template: /eng/common/core-templates/steps/source-index-stage1-publish.yml
parameters:
is1ESPipeline: true
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}

2
eng/common/templates-official/variables/sdl-variables.yml

@ -4,4 +4,4 @@ variables:
- name: DefaultGuardianVersion - name: DefaultGuardianVersion
value: 0.109.0 value: 0.109.0
- name: GuardianPackagesConfigFile - name: GuardianPackagesConfigFile
value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config value: $(System.DefaultWorkingDirectory)\eng\common\sdl\packages.config

8
eng/common/templates/job/job.yml

@ -6,7 +6,7 @@ parameters:
enableSbom: true enableSbom: true
runAsPublic: false runAsPublic: false
PackageVersion: 9.0.0 PackageVersion: 9.0.0
BuildDropPath: '$(Build.SourcesDirectory)/artifacts' BuildDropPath: '$(System.DefaultWorkingDirectory)/artifacts'
jobs: jobs:
- template: /eng/common/core-templates/job/job.yml - template: /eng/common/core-templates/job/job.yml
@ -46,6 +46,7 @@ jobs:
artifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }} artifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }}
continueOnError: true continueOnError: true
condition: always() condition: always()
retryCountOnTaskFailure: 10 # for any logs being locked
- ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}: - ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}:
- template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
parameters: parameters:
@ -56,6 +57,7 @@ jobs:
displayName: 'Publish logs' displayName: 'Publish logs'
continueOnError: true continueOnError: true
condition: always() condition: always()
retryCountOnTaskFailure: 10 # for any logs being locked
sbomEnabled: false # we don't need SBOM for logs sbomEnabled: false # we don't need SBOM for logs
- ${{ if ne(parameters.enablePublishBuildArtifacts, 'false') }}: - ${{ if ne(parameters.enablePublishBuildArtifacts, 'false') }}:
@ -66,7 +68,7 @@ jobs:
displayName: Publish Logs displayName: Publish Logs
pathToPublish: '$(Build.ArtifactStagingDirectory)/artifacts/log/$(_BuildConfig)' pathToPublish: '$(Build.ArtifactStagingDirectory)/artifacts/log/$(_BuildConfig)'
publishLocation: Container publishLocation: Container
artifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)' ) }} artifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)_Attempt$(System.JobAttempt)' ) }}
continueOnError: true continueOnError: true
condition: always() condition: always()
@ -75,7 +77,7 @@ jobs:
parameters: parameters:
is1ESPipeline: false is1ESPipeline: false
args: args:
targetPath: '$(Build.SourcesDirectory)\eng\common\BuildConfiguration' targetPath: '$(System.DefaultWorkingDirectory)\eng\common\BuildConfiguration'
artifactName: 'BuildConfiguration' artifactName: 'BuildConfiguration'
displayName: 'Publish build retry configuration' displayName: 'Publish build retry configuration'
continueOnError: true continueOnError: true

6
eng/common/templates/steps/publish-build-artifacts.yml

@ -25,6 +25,10 @@ parameters:
type: string type: string
default: 'Container' default: 'Container'
- name: retryCountOnTaskFailure
type: string
default: 10
steps: steps:
- ${{ if eq(parameters.is1ESPipeline, true) }}: - ${{ if eq(parameters.is1ESPipeline, true) }}:
- 'eng/common/templates cannot be referenced from a 1ES managed template': error - 'eng/common/templates cannot be referenced from a 1ES managed template': error
@ -38,3 +42,5 @@ steps:
PathtoPublish: ${{ parameters.pathToPublish }} PathtoPublish: ${{ parameters.pathToPublish }}
${{ if parameters.artifactName }}: ${{ if parameters.artifactName }}:
ArtifactName: ${{ parameters.artifactName }} ArtifactName: ${{ parameters.artifactName }}
${{ if parameters.retryCountOnTaskFailure }}:
retryCountOnTaskFailure: ${{ parameters.retryCountOnTaskFailure }}

7
eng/common/templates/steps/source-index-stage1-publish.yml

@ -0,0 +1,7 @@
steps:
- template: /eng/common/core-templates/steps/source-index-stage1-publish.yml
parameters:
is1ESPipeline: false
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}

207
eng/common/templates/steps/vmr-sync.yml

@ -0,0 +1,207 @@
### These steps synchronize new code from product repositories into the VMR (https://github.com/dotnet/dotnet).
### They initialize the darc CLI and pull the new updates.
### Changes are applied locally onto the already cloned VMR (located in $vmrPath).
parameters:
- name: targetRef
displayName: Target revision in dotnet/<repo> to synchronize
type: string
default: $(Build.SourceVersion)
- name: vmrPath
displayName: Path where the dotnet/dotnet is checked out to
type: string
default: $(Agent.BuildDirectory)/vmr
- name: additionalSyncs
displayName: Optional list of package names whose repo's source will also be synchronized in the local VMR, e.g. NuGet.Protocol
type: object
default: []
steps:
- checkout: vmr
displayName: Clone dotnet/dotnet
path: vmr
clean: true
- checkout: self
displayName: Clone $(Build.Repository.Name)
path: repo
fetchDepth: 0
# This step is needed so that when we get a detached HEAD / shallow clone,
# we still pull the commit into the temporary repo clone to use it during the sync.
# Also unshallow the clone so that forwardflow command would work.
- script: |
git branch repo-head
git rev-parse HEAD
displayName: Label PR commit
workingDirectory: $(Agent.BuildDirectory)/repo
- script: |
vmr_sha=$(grep -oP '(?<=Sha=")[^"]*' $(Agent.BuildDirectory)/repo/eng/Version.Details.xml)
echo "##vso[task.setvariable variable=vmr_sha]$vmr_sha"
displayName: Obtain the vmr sha from Version.Details.xml (Unix)
condition: ne(variables['Agent.OS'], 'Windows_NT')
workingDirectory: $(Agent.BuildDirectory)/repo
- powershell: |
[xml]$xml = Get-Content -Path $(Agent.BuildDirectory)/repo/eng/Version.Details.xml
$vmr_sha = $xml.SelectSingleNode("//Source").Sha
Write-Output "##vso[task.setvariable variable=vmr_sha]$vmr_sha"
displayName: Obtain the vmr sha from Version.Details.xml (Windows)
condition: eq(variables['Agent.OS'], 'Windows_NT')
workingDirectory: $(Agent.BuildDirectory)/repo
- script: |
git fetch --all
git checkout $(vmr_sha)
displayName: Checkout VMR at correct sha for repo flow
workingDirectory: ${{ parameters.vmrPath }}
- script: |
git config --global user.name "dotnet-maestro[bot]"
git config --global user.email "dotnet-maestro[bot]@users.noreply.github.com"
displayName: Set git author to dotnet-maestro[bot]
workingDirectory: ${{ parameters.vmrPath }}
- script: |
./eng/common/vmr-sync.sh \
--vmr ${{ parameters.vmrPath }} \
--tmp $(Agent.TempDirectory) \
--azdev-pat '$(dn-bot-all-orgs-code-r)' \
--ci \
--debug
if [ "$?" -ne 0 ]; then
echo "##vso[task.logissue type=error]Failed to synchronize the VMR"
exit 1
fi
displayName: Sync repo into VMR (Unix)
condition: ne(variables['Agent.OS'], 'Windows_NT')
workingDirectory: $(Agent.BuildDirectory)/repo
- script: |
git config --global diff.astextplain.textconv echo
git config --system core.longpaths true
displayName: Configure Windows git (longpaths, astextplain)
condition: eq(variables['Agent.OS'], 'Windows_NT')
- powershell: |
./eng/common/vmr-sync.ps1 `
-vmr ${{ parameters.vmrPath }} `
-tmp $(Agent.TempDirectory) `
-azdevPat '$(dn-bot-all-orgs-code-r)' `
-ci `
-debugOutput
if ($LASTEXITCODE -ne 0) {
echo "##vso[task.logissue type=error]Failed to synchronize the VMR"
exit 1
}
displayName: Sync repo into VMR (Windows)
condition: eq(variables['Agent.OS'], 'Windows_NT')
workingDirectory: $(Agent.BuildDirectory)/repo
- ${{ if eq(variables['Build.Reason'], 'PullRequest') }}:
- task: CopyFiles@2
displayName: Collect failed patches
condition: failed()
inputs:
SourceFolder: '$(Agent.TempDirectory)'
Contents: '*.patch'
TargetFolder: '$(Build.ArtifactStagingDirectory)/FailedPatches'
- publish: '$(Build.ArtifactStagingDirectory)/FailedPatches'
artifact: $(System.JobDisplayName)_FailedPatches
displayName: Upload failed patches
condition: failed()
- ${{ each assetName in parameters.additionalSyncs }}:
# The vmr-sync script ends up staging files in the local VMR so we have to commit those
- script:
git commit --allow-empty -am "Forward-flow $(Build.Repository.Name)"
displayName: Commit local VMR changes
workingDirectory: ${{ parameters.vmrPath }}
- script: |
set -ex
echo "Searching for details of asset ${{ assetName }}..."
# Use darc to get dependencies information
dependencies=$(./.dotnet/dotnet darc get-dependencies --name '${{ assetName }}' --ci)
# Extract repository URL and commit hash
repository=$(echo "$dependencies" | grep 'Repo:' | sed 's/Repo:[[:space:]]*//' | head -1)
if [ -z "$repository" ]; then
echo "##vso[task.logissue type=error]Asset ${{ assetName }} not found in the dependency list"
exit 1
fi
commit=$(echo "$dependencies" | grep 'Commit:' | sed 's/Commit:[[:space:]]*//' | head -1)
echo "Updating the VMR from $repository / $commit..."
cd ..
git clone $repository ${{ assetName }}
cd ${{ assetName }}
git checkout $commit
git branch "sync/$commit"
./eng/common/vmr-sync.sh \
--vmr ${{ parameters.vmrPath }} \
--tmp $(Agent.TempDirectory) \
--azdev-pat '$(dn-bot-all-orgs-code-r)' \
--ci \
--debug
if [ "$?" -ne 0 ]; then
echo "##vso[task.logissue type=error]Failed to synchronize the VMR"
exit 1
fi
displayName: Sync ${{ assetName }} into (Unix)
condition: ne(variables['Agent.OS'], 'Windows_NT')
workingDirectory: $(Agent.BuildDirectory)/repo
- powershell: |
$ErrorActionPreference = 'Stop'
Write-Host "Searching for details of asset ${{ assetName }}..."
$dependencies = .\.dotnet\dotnet darc get-dependencies --name '${{ assetName }}' --ci
$repository = $dependencies | Select-String -Pattern 'Repo:\s+([^\s]+)' | Select-Object -First 1
$repository -match 'Repo:\s+([^\s]+)' | Out-Null
$repository = $matches[1]
if ($repository -eq $null) {
Write-Error "Asset ${{ assetName }} not found in the dependency list"
exit 1
}
$commit = $dependencies | Select-String -Pattern 'Commit:\s+([^\s]+)' | Select-Object -First 1
$commit -match 'Commit:\s+([^\s]+)' | Out-Null
$commit = $matches[1]
Write-Host "Updating the VMR from $repository / $commit..."
cd ..
git clone $repository ${{ assetName }}
cd ${{ assetName }}
git checkout $commit
git branch "sync/$commit"
.\eng\common\vmr-sync.ps1 `
-vmr ${{ parameters.vmrPath }} `
-tmp $(Agent.TempDirectory) `
-azdevPat '$(dn-bot-all-orgs-code-r)' `
-ci `
-debugOutput
if ($LASTEXITCODE -ne 0) {
echo "##vso[task.logissue type=error]Failed to synchronize the VMR"
exit 1
}
displayName: Sync ${{ assetName }} into (Windows)
condition: ne(variables['Agent.OS'], 'Windows_NT')
workingDirectory: $(Agent.BuildDirectory)/repo

42
eng/common/templates/vmr-build-pr.yml

@ -0,0 +1,42 @@
# This pipeline is used for running the VMR verification of the PR changes in repo-level PRs.
#
# It will run a full set of verification jobs defined in:
# https://github.com/dotnet/dotnet/blob/10060d128e3f470e77265f8490f5e4f72dae738e/eng/pipelines/templates/stages/vmr-build.yml#L27-L38
#
# For repos that do not need to run the full set, you would do the following:
#
# 1. Copy this YML file to a repo-specific location, i.e. outside of eng/common.
#
# 2. Add `verifications` parameter to VMR template reference
#
# Examples:
# - For source-build stage 1 verification, add the following:
# verifications: [ "source-build-stage1" ]
#
# - For Windows only verifications, add the following:
# verifications: [ "unified-build-windows-x64", "unified-build-windows-x86" ]
trigger: none
pr: none
variables:
- template: /eng/common/templates/variables/pool-providers.yml@self
- name: skipComponentGovernanceDetection # we run CG on internal builds only
value: true
- name: Codeql.Enabled # we run CodeQL on internal builds only
value: false
resources:
repositories:
- repository: vmr
type: github
name: dotnet/dotnet
endpoint: dotnet
stages:
- template: /eng/pipelines/templates/stages/vmr-build.yml@vmr
parameters:
isBuiltFromVmr: false
scope: lite

72
eng/common/tools.ps1

@ -42,7 +42,7 @@
[bool]$useInstalledDotNetCli = if (Test-Path variable:useInstalledDotNetCli) { $useInstalledDotNetCli } else { $true } [bool]$useInstalledDotNetCli = if (Test-Path variable:useInstalledDotNetCli) { $useInstalledDotNetCli } else { $true }
# Enable repos to use a particular version of the on-line dotnet-install scripts. # Enable repos to use a particular version of the on-line dotnet-install scripts.
# default URL: https://dotnet.microsoft.com/download/dotnet/scripts/v1/dotnet-install.ps1 # default URL: https://builds.dotnet.microsoft.com/dotnet/scripts/v1/dotnet-install.ps1
[string]$dotnetInstallScriptVersion = if (Test-Path variable:dotnetInstallScriptVersion) { $dotnetInstallScriptVersion } else { 'v1' } [string]$dotnetInstallScriptVersion = if (Test-Path variable:dotnetInstallScriptVersion) { $dotnetInstallScriptVersion } else { 'v1' }
# True to use global NuGet cache instead of restoring packages to repository-local directory. # True to use global NuGet cache instead of restoring packages to repository-local directory.
@ -65,10 +65,8 @@ $ErrorActionPreference = 'Stop'
# Base-64 encoded SAS token that has permission to storage container described by $runtimeSourceFeed # Base-64 encoded SAS token that has permission to storage container described by $runtimeSourceFeed
[string]$runtimeSourceFeedKey = if (Test-Path variable:runtimeSourceFeedKey) { $runtimeSourceFeedKey } else { $null } [string]$runtimeSourceFeedKey = if (Test-Path variable:runtimeSourceFeedKey) { $runtimeSourceFeedKey } else { $null }
# True if the build is a product build # True when the build is running within the VMR.
[bool]$productBuild = if (Test-Path variable:productBuild) { $productBuild } else { $false } [bool]$fromVMR = if (Test-Path variable:fromVMR) { $fromVMR } else { $false }
[String[]]$properties = if (Test-Path variable:properties) { $properties } else { @() }
function Create-Directory ([string[]] $path) { function Create-Directory ([string[]] $path) {
New-Item -Path $path -Force -ItemType 'Directory' | Out-Null New-Item -Path $path -Force -ItemType 'Directory' | Out-Null
@ -259,10 +257,23 @@ function Retry($downloadBlock, $maxRetries = 5) {
function GetDotNetInstallScript([string] $dotnetRoot) { function GetDotNetInstallScript([string] $dotnetRoot) {
$installScript = Join-Path $dotnetRoot 'dotnet-install.ps1' $installScript = Join-Path $dotnetRoot 'dotnet-install.ps1'
$shouldDownload = $false
if (!(Test-Path $installScript)) { if (!(Test-Path $installScript)) {
$shouldDownload = $true
} else {
# Check if the script is older than 30 days
$fileAge = (Get-Date) - (Get-Item $installScript).LastWriteTime
if ($fileAge.Days -gt 30) {
Write-Host "Existing install script is too old, re-downloading..."
$shouldDownload = $true
}
}
if ($shouldDownload) {
Create-Directory $dotnetRoot Create-Directory $dotnetRoot
$ProgressPreference = 'SilentlyContinue' # Don't display the console progress UI - it's a huge perf hit $ProgressPreference = 'SilentlyContinue' # Don't display the console progress UI - it's a huge perf hit
$uri = "https://dotnet.microsoft.com/download/dotnet/scripts/$dotnetInstallScriptVersion/dotnet-install.ps1" $uri = "https://builds.dotnet.microsoft.com/dotnet/scripts/$dotnetInstallScriptVersion/dotnet-install.ps1"
Retry({ Retry({
Write-Host "GET $uri" Write-Host "GET $uri"
@ -383,8 +394,8 @@ function InitializeVisualStudioMSBuild([bool]$install, [object]$vsRequirements =
# If the version of msbuild is going to be xcopied, # If the version of msbuild is going to be xcopied,
# use this version. Version matches a package here: # use this version. Version matches a package here:
# https://dev.azure.com/dnceng/public/_artifacts/feed/dotnet-eng/NuGet/Microsoft.DotNet.Arcade.MSBuild.Xcopy/versions/17.12.0 # https://dev.azure.com/dnceng/public/_artifacts/feed/dotnet-eng/NuGet/Microsoft.DotNet.Arcade.MSBuild.Xcopy/versions/17.13.0
$defaultXCopyMSBuildVersion = '17.12.0' $defaultXCopyMSBuildVersion = '17.13.0'
if (!$vsRequirements) { if (!$vsRequirements) {
if (Get-Member -InputObject $GlobalJson.tools -Name 'vs') { if (Get-Member -InputObject $GlobalJson.tools -Name 'vs') {
@ -416,7 +427,7 @@ function InitializeVisualStudioMSBuild([bool]$install, [object]$vsRequirements =
# Locate Visual Studio installation or download x-copy msbuild. # Locate Visual Studio installation or download x-copy msbuild.
$vsInfo = LocateVisualStudio $vsRequirements $vsInfo = LocateVisualStudio $vsRequirements
if ($vsInfo -ne $null) { if ($vsInfo -ne $null -and $env:ForceUseXCopyMSBuild -eq $null) {
# Ensure vsInstallDir has a trailing slash # Ensure vsInstallDir has a trailing slash
$vsInstallDir = Join-Path $vsInfo.installationPath "\" $vsInstallDir = Join-Path $vsInfo.installationPath "\"
$vsMajorVersion = $vsInfo.installationVersion.Split('.')[0] $vsMajorVersion = $vsInfo.installationVersion.Split('.')[0]
@ -533,7 +544,8 @@ function LocateVisualStudio([object]$vsRequirements = $null){
if (Get-Member -InputObject $GlobalJson.tools -Name 'vswhere') { if (Get-Member -InputObject $GlobalJson.tools -Name 'vswhere') {
$vswhereVersion = $GlobalJson.tools.vswhere $vswhereVersion = $GlobalJson.tools.vswhere
} else { } else {
$vswhereVersion = '2.5.2' # keep this in sync with the VSWhereVersion in DefaultVersions.props
$vswhereVersion = '3.1.7'
} }
$vsWhereDir = Join-Path $ToolsDir "vswhere\$vswhereVersion" $vsWhereDir = Join-Path $ToolsDir "vswhere\$vswhereVersion"
@ -541,7 +553,8 @@ function LocateVisualStudio([object]$vsRequirements = $null){
if (!(Test-Path $vsWhereExe)) { if (!(Test-Path $vsWhereExe)) {
Create-Directory $vsWhereDir Create-Directory $vsWhereDir
Write-Host 'Downloading vswhere' Write-Host "Downloading vswhere $vswhereVersion"
$ProgressPreference = 'SilentlyContinue' # Don't display the console progress UI - it's a huge perf hit
Retry({ Retry({
Invoke-WebRequest "https://netcorenativeassets.blob.core.windows.net/resource-packages/external/windows/vswhere/$vswhereVersion/vswhere.exe" -OutFile $vswhereExe Invoke-WebRequest "https://netcorenativeassets.blob.core.windows.net/resource-packages/external/windows/vswhere/$vswhereVersion/vswhere.exe" -OutFile $vswhereExe
}) })
@ -604,14 +617,7 @@ function InitializeBuildTool() {
} }
$dotnetPath = Join-Path $dotnetRoot (GetExecutableFileName 'dotnet') $dotnetPath = Join-Path $dotnetRoot (GetExecutableFileName 'dotnet')
# Use override if it exists - commonly set by source-build $buildTool = @{ Path = $dotnetPath; Command = 'msbuild'; Tool = 'dotnet'; Framework = 'net' }
if ($null -eq $env:_OverrideArcadeInitializeBuildToolFramework) {
$initializeBuildToolFramework="net9.0"
} else {
$initializeBuildToolFramework=$env:_OverrideArcadeInitializeBuildToolFramework
}
$buildTool = @{ Path = $dotnetPath; Command = 'msbuild'; Tool = 'dotnet'; Framework = $initializeBuildToolFramework }
} elseif ($msbuildEngine -eq "vs") { } elseif ($msbuildEngine -eq "vs") {
try { try {
$msbuildPath = InitializeVisualStudioMSBuild -install:$restore $msbuildPath = InitializeVisualStudioMSBuild -install:$restore
@ -620,7 +626,7 @@ function InitializeBuildTool() {
ExitWithExitCode 1 ExitWithExitCode 1
} }
$buildTool = @{ Path = $msbuildPath; Command = ""; Tool = "vs"; Framework = "net472"; ExcludePrereleaseVS = $excludePrereleaseVS } $buildTool = @{ Path = $msbuildPath; Command = ""; Tool = "vs"; Framework = "netframework"; ExcludePrereleaseVS = $excludePrereleaseVS }
} else { } else {
Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Unexpected value of -msbuildEngine: '$msbuildEngine'." Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Unexpected value of -msbuildEngine: '$msbuildEngine'."
ExitWithExitCode 1 ExitWithExitCode 1
@ -653,7 +659,6 @@ function GetNuGetPackageCachePath() {
$env:NUGET_PACKAGES = Join-Path $env:UserProfile '.nuget\packages\' $env:NUGET_PACKAGES = Join-Path $env:UserProfile '.nuget\packages\'
} else { } else {
$env:NUGET_PACKAGES = Join-Path $RepoRoot '.packages\' $env:NUGET_PACKAGES = Join-Path $RepoRoot '.packages\'
$env:RESTORENOHTTPCACHE = $true
} }
} }
@ -775,26 +780,13 @@ function MSBuild() {
$toolsetBuildProject = InitializeToolset $toolsetBuildProject = InitializeToolset
$basePath = Split-Path -parent $toolsetBuildProject $basePath = Split-Path -parent $toolsetBuildProject
$possiblePaths = @( $selectedPath = Join-Path $basePath (Join-Path $buildTool.Framework 'Microsoft.DotNet.ArcadeLogging.dll')
# new scripts need to work with old packages, so we need to look for the old names/versions
(Join-Path $basePath (Join-Path $buildTool.Framework 'Microsoft.DotNet.ArcadeLogging.dll')),
(Join-Path $basePath (Join-Path $buildTool.Framework 'Microsoft.DotNet.Arcade.Sdk.dll')),
(Join-Path $basePath (Join-Path net7.0 'Microsoft.DotNet.ArcadeLogging.dll')),
(Join-Path $basePath (Join-Path net7.0 'Microsoft.DotNet.Arcade.Sdk.dll')),
(Join-Path $basePath (Join-Path net8.0 'Microsoft.DotNet.ArcadeLogging.dll')),
(Join-Path $basePath (Join-Path net8.0 'Microsoft.DotNet.Arcade.Sdk.dll'))
)
$selectedPath = $null
foreach ($path in $possiblePaths) {
if (Test-Path $path -PathType Leaf) {
$selectedPath = $path
break
}
}
if (-not $selectedPath) { if (-not $selectedPath) {
Write-PipelineTelemetryError -Category 'Build' -Message 'Unable to find arcade sdk logger assembly.' Write-PipelineTelemetryError -Category 'Build' -Message "Unable to find arcade sdk logger assembly: $selectedPath"
ExitWithExitCode 1 ExitWithExitCode 1
} }
$args += "/logger:$selectedPath" $args += "/logger:$selectedPath"
} }
@ -857,8 +849,8 @@ function MSBuild-Core() {
} }
# When running on Azure Pipelines, override the returned exit code to avoid double logging. # When running on Azure Pipelines, override the returned exit code to avoid double logging.
# Skip this when the build is a child of the VMR orchestrator build. # Skip this when the build is a child of the VMR build.
if ($ci -and $env:SYSTEM_TEAMPROJECT -ne $null -and !$productBuild -and -not($properties -like "*DotNetBuildRepo=true*")) { if ($ci -and $env:SYSTEM_TEAMPROJECT -ne $null -and !$fromVMR) {
Write-PipelineSetResult -Result "Failed" -Message "msbuild execution failed." Write-PipelineSetResult -Result "Failed" -Message "msbuild execution failed."
# Exiting with an exit code causes the azure pipelines task to log yet another "noise" error # Exiting with an exit code causes the azure pipelines task to log yet another "noise" error
# The above Write-PipelineSetResult will cause the task to be marked as failure without adding yet another error # The above Write-PipelineSetResult will cause the task to be marked as failure without adding yet another error

77
eng/common/tools.sh

@ -5,6 +5,9 @@
# CI mode - set to true on CI server for PR validation build or official build. # CI mode - set to true on CI server for PR validation build or official build.
ci=${ci:-false} ci=${ci:-false}
# Build mode
source_build=${source_build:-false}
# Set to true to use the pipelines logger which will enable Azure logging output. # Set to true to use the pipelines logger which will enable Azure logging output.
# https://github.com/Microsoft/azure-pipelines-tasks/blob/master/docs/authoring/commands.md # https://github.com/Microsoft/azure-pipelines-tasks/blob/master/docs/authoring/commands.md
# This flag is meant as a temporary opt-opt for the feature while validate it across # This flag is meant as a temporary opt-opt for the feature while validate it across
@ -54,11 +57,12 @@ warn_as_error=${warn_as_error:-true}
use_installed_dotnet_cli=${use_installed_dotnet_cli:-true} use_installed_dotnet_cli=${use_installed_dotnet_cli:-true}
# Enable repos to use a particular version of the on-line dotnet-install scripts. # Enable repos to use a particular version of the on-line dotnet-install scripts.
# default URL: https://dotnet.microsoft.com/download/dotnet/scripts/v1/dotnet-install.sh # default URL: https://builds.dotnet.microsoft.com/dotnet/scripts/v1/dotnet-install.sh
dotnetInstallScriptVersion=${dotnetInstallScriptVersion:-'v1'} dotnetInstallScriptVersion=${dotnetInstallScriptVersion:-'v1'}
# True to use global NuGet cache instead of restoring packages to repository-local directory. # True to use global NuGet cache instead of restoring packages to repository-local directory.
if [[ "$ci" == true ]]; then # Keep in sync with NuGetPackageroot in Arcade SDK's RepositoryLayout.props.
if [[ "$ci" == true || "$source_build" == true ]]; then
use_global_nuget_cache=${use_global_nuget_cache:-false} use_global_nuget_cache=${use_global_nuget_cache:-false}
else else
use_global_nuget_cache=${use_global_nuget_cache:-true} use_global_nuget_cache=${use_global_nuget_cache:-true}
@ -68,8 +72,8 @@ fi
runtime_source_feed=${runtime_source_feed:-''} runtime_source_feed=${runtime_source_feed:-''}
runtime_source_feed_key=${runtime_source_feed_key:-''} runtime_source_feed_key=${runtime_source_feed_key:-''}
# True if the build is a product build # True when the build is running within the VMR.
product_build=${product_build:-false} from_vmr=${from_vmr:-false}
# Resolve any symlinks in the given path. # Resolve any symlinks in the given path.
function ResolvePath { function ResolvePath {
@ -295,9 +299,30 @@ function with_retries {
function GetDotNetInstallScript { function GetDotNetInstallScript {
local root=$1 local root=$1
local install_script="$root/dotnet-install.sh" local install_script="$root/dotnet-install.sh"
local install_script_url="https://dotnet.microsoft.com/download/dotnet/scripts/$dotnetInstallScriptVersion/dotnet-install.sh" local install_script_url="https://builds.dotnet.microsoft.com/dotnet/scripts/$dotnetInstallScriptVersion/dotnet-install.sh"
local timestamp_file="$root/.dotnet-install.timestamp"
local should_download=false
if [[ ! -a "$install_script" ]]; then if [[ ! -a "$install_script" ]]; then
should_download=true
elif [[ -f "$timestamp_file" ]]; then
# Check if the script is older than 30 days using timestamp file
local download_time=$(cat "$timestamp_file" 2>/dev/null || echo "0")
local current_time=$(date +%s)
local age_seconds=$((current_time - download_time))
# 30 days = 30 * 24 * 60 * 60 = 2592000 seconds
if [[ $age_seconds -gt 2592000 ]]; then
echo "Existing install script is too old, re-downloading..."
should_download=true
fi
else
# No timestamp file exists, assume script is old and re-download
echo "No timestamp found for existing install script, re-downloading..."
should_download=true
fi
if [[ "$should_download" == true ]]; then
mkdir -p "$root" mkdir -p "$root"
echo "Downloading '$install_script_url'" echo "Downloading '$install_script_url'"
@ -324,6 +349,9 @@ function GetDotNetInstallScript {
ExitWithExitCode $exit_code ExitWithExitCode $exit_code
} }
fi fi
# Create timestamp file to track download time in seconds from epoch
date +%s > "$timestamp_file"
fi fi
# return value # return value
_GetDotNetInstallScript="$install_script" _GetDotNetInstallScript="$install_script"
@ -339,22 +367,14 @@ function InitializeBuildTool {
# return values # return values
_InitializeBuildTool="$_InitializeDotNetCli/dotnet" _InitializeBuildTool="$_InitializeDotNetCli/dotnet"
_InitializeBuildToolCommand="msbuild" _InitializeBuildToolCommand="msbuild"
# use override if it exists - commonly set by source-build
if [[ "${_OverrideArcadeInitializeBuildToolFramework:-x}" == "x" ]]; then
_InitializeBuildToolFramework="net9.0"
else
_InitializeBuildToolFramework="${_OverrideArcadeInitializeBuildToolFramework}"
fi
} }
# Set RestoreNoHttpCache as a workaround for https://github.com/NuGet/Home/issues/3116
function GetNuGetPackageCachePath { function GetNuGetPackageCachePath {
if [[ -z ${NUGET_PACKAGES:-} ]]; then if [[ -z ${NUGET_PACKAGES:-} ]]; then
if [[ "$use_global_nuget_cache" == true ]]; then if [[ "$use_global_nuget_cache" == true ]]; then
export NUGET_PACKAGES="$HOME/.nuget/packages/" export NUGET_PACKAGES="$HOME/.nuget/packages/"
else else
export NUGET_PACKAGES="$repo_root/.packages/" export NUGET_PACKAGES="$repo_root/.packages/"
export RESTORENOHTTPCACHE=true
fi fi
fi fi
@ -451,25 +471,13 @@ function MSBuild {
fi fi
local toolset_dir="${_InitializeToolset%/*}" local toolset_dir="${_InitializeToolset%/*}"
# new scripts need to work with old packages, so we need to look for the old names/versions local selectedPath="$toolset_dir/net/Microsoft.DotNet.ArcadeLogging.dll"
local selectedPath=
local possiblePaths=()
possiblePaths+=( "$toolset_dir/$_InitializeBuildToolFramework/Microsoft.DotNet.ArcadeLogging.dll" )
possiblePaths+=( "$toolset_dir/$_InitializeBuildToolFramework/Microsoft.DotNet.Arcade.Sdk.dll" )
possiblePaths+=( "$toolset_dir/net7.0/Microsoft.DotNet.ArcadeLogging.dll" )
possiblePaths+=( "$toolset_dir/net7.0/Microsoft.DotNet.Arcade.Sdk.dll" )
possiblePaths+=( "$toolset_dir/net8.0/Microsoft.DotNet.ArcadeLogging.dll" )
possiblePaths+=( "$toolset_dir/net8.0/Microsoft.DotNet.Arcade.Sdk.dll" )
for path in "${possiblePaths[@]}"; do
if [[ -f $path ]]; then
selectedPath=$path
break
fi
done
if [[ -z "$selectedPath" ]]; then if [[ -z "$selectedPath" ]]; then
Write-PipelineTelemetryError -category 'Build' "Unable to find arcade sdk logger assembly." Write-PipelineTelemetryError -category 'Build' "Unable to find arcade sdk logger assembly: $selectedPath"
ExitWithExitCode 1 ExitWithExitCode 1
fi fi
args+=( "-logger:$selectedPath" ) args+=( "-logger:$selectedPath" )
fi fi
@ -506,8 +514,8 @@ function MSBuild-Core {
echo "Build failed with exit code $exit_code. Check errors above." echo "Build failed with exit code $exit_code. Check errors above."
# When running on Azure Pipelines, override the returned exit code to avoid double logging. # When running on Azure Pipelines, override the returned exit code to avoid double logging.
# Skip this when the build is a child of the VMR orchestrator build. # Skip this when the build is a child of the VMR build.
if [[ "$ci" == true && -n ${SYSTEM_TEAMPROJECT:-} && "$product_build" != true && "$properties" != *"DotNetBuildRepo=true"* ]]; then if [[ "$ci" == true && -n ${SYSTEM_TEAMPROJECT:-} && "$from_vmr" != true ]]; then
Write-PipelineSetResult -result "Failed" -message "msbuild execution failed." Write-PipelineSetResult -result "Failed" -message "msbuild execution failed."
# Exiting with an exit code causes the azure pipelines task to log yet another "noise" error # Exiting with an exit code causes the azure pipelines task to log yet another "noise" error
# The above Write-PipelineSetResult will cause the task to be marked as failure without adding yet another error # The above Write-PipelineSetResult will cause the task to be marked as failure without adding yet another error
@ -530,6 +538,13 @@ function GetDarc {
fi fi
"$eng_root/common/darc-init.sh" --toolpath "$darc_path" $version "$eng_root/common/darc-init.sh" --toolpath "$darc_path" $version
darc_tool="$darc_path/darc"
}
# Returns a full path to an Arcade SDK task project file.
function GetSdkTaskProject {
taskName=$1
echo "$(dirname $_InitializeToolset)/SdkTasks/$taskName.proj"
} }
ResolvePath "${BASH_SOURCE[0]}" ResolvePath "${BASH_SOURCE[0]}"

138
eng/common/vmr-sync.ps1

@ -0,0 +1,138 @@
<#
.SYNOPSIS
This script is used for synchronizing the current repository into a local VMR.
It pulls the current repository's code into the specified VMR directory for local testing or
Source-Build validation.
.DESCRIPTION
The tooling used for synchronization will clone the VMR repository into a temporary folder if
it does not already exist. These clones can be reused in future synchronizations, so it is
recommended to dedicate a folder for this to speed up re-runs.
.EXAMPLE
Synchronize current repository into a local VMR:
./vmr-sync.ps1 -vmrDir "$HOME/repos/dotnet" -tmpDir "$HOME/repos/tmp"
.PARAMETER tmpDir
Required. Path to the temporary folder where repositories will be cloned
.PARAMETER vmrBranch
Optional. Branch of the 'dotnet/dotnet' repo to synchronize. The VMR will be checked out to this branch
.PARAMETER azdevPat
Optional. Azure DevOps PAT to use for cloning private repositories.
.PARAMETER vmrDir
Optional. Path to the dotnet/dotnet repository. When null, gets cloned to the temporary folder
.PARAMETER debugOutput
Optional. Enables debug logging in the darc vmr command.
.PARAMETER ci
Optional. Denotes that the script is running in a CI environment.
#>
param (
  # Required. Temporary folder where repositories will be cloned.
  [Parameter(Mandatory=$true, HelpMessage="Path to the temporary folder where repositories will be cloned")]
  [string][Alias('t', 'tmp')]$tmpDir,
  # Optional. Branch of the 'dotnet/dotnet' repo to synchronize; the VMR is checked out to it.
  [string][Alias('b', 'branch')]$vmrBranch,
  # NOTE(review): declared but never referenced below; the .sh counterpart forwards
  # --remote to darc — confirm whether this script should do the same.
  [string]$remote,
  # Optional. Azure DevOps PAT used for cloning private repositories.
  [string]$azdevPat,
  # Optional. Path to the dotnet/dotnet repository; cloned under $tmpDir when omitted.
  [string][Alias('v', 'vmr')]$vmrDir,
  # Optional. Denotes that the script is running in a CI environment.
  [switch]$ci,
  # Optional. Enables debug logging in the darc vmr command.
  [switch]$debugOutput
)
# Writes an error message to the console in red.
# Note: despite the name, this does not terminate the script — callers exit themselves.
function Fail {
  $message = $args[0]
  Write-Host ("> " + $message) -ForegroundColor 'Red'
}
# Writes a status message to the console in cyan.
function Highlight {
  $message = $args[0]
  Write-Host ("> " + $message) -ForegroundColor 'Cyan'
}
# Select darc verbosity: -debugOutput switches the tooling to its most verbose mode.
$verbosity = 'verbose'
if ($debugOutput) {
  $verbosity = 'debug'
}

# Validation (defensive: -tmpDir is already Mandatory, but guard against an empty value).
if (-not $tmpDir) {
  Fail "Missing -tmpDir argument. Please specify the path to the temporary folder where the repositories will be cloned"
  exit 1
}

# Sanitize the input: default the VMR location to a 'dotnet' folder under the temp dir.
if (-not $vmrDir) {
  $vmrDir = Join-Path $tmpDir 'dotnet'
}

if (-not (Test-Path -Path $tmpDir -PathType Container)) {
  New-Item -ItemType Directory -Path $tmpDir | Out-Null
}

# Prepare the VMR: clone it when missing, otherwise require a clean working tree
# and check out the requested branch.
if (-not (Test-Path -Path $vmrDir -PathType Container)) {
  Highlight "Cloning 'dotnet/dotnet' into $vmrDir.."
  git clone https://github.com/dotnet/dotnet $vmrDir

  if ($vmrBranch) {
    git -C $vmrDir switch -c $vmrBranch
  }
}
else {
  # 'git diff --quiet' prints nothing and reports dirtiness through its exit code,
  # so inspect $LASTEXITCODE. (Comparing the command's empty output to $false, as the
  # previous code did, can never evaluate to true and silently skipped this check.)
  git -C $vmrDir diff --quiet
  if ($LASTEXITCODE -ne 0) {
    Fail "There are changes in the working tree of $vmrDir. Please commit or stash your changes"
    exit 1
  }

  if ($vmrBranch) {
    Highlight "Preparing $vmrDir"
    git -C $vmrDir checkout $vmrBranch
    git -C $vmrDir pull
  }
}

Set-StrictMode -Version Latest

# Prepare darc: dot-source the Arcade helpers, install the .NET SDK and the darc tool.
Highlight 'Installing .NET, preparing the tooling..'
. .\eng\common\tools.ps1
$dotnetRoot = InitializeDotNetCli -install:$true
$darc = Get-Darc

Highlight "Starting the synchronization of VMR.."

# Synchronize the VMR via 'darc vmr forwardflow'.
$darcArgs = (
  "vmr", "forwardflow",
  "--tmp", $tmpDir,
  "--$verbosity",
  $vmrDir
)

if ($ci) {
  $darcArgs += ("--ci")
}

if ($azdevPat) {
  $darcArgs += ("--azdev-pat", $azdevPat)
}

& "$darc" $darcArgs

if ($LASTEXITCODE -eq 0) {
  Highlight "Synchronization succeeded"
}
else {
  Fail "Synchronization of repo to VMR failed!"
  Fail "'$vmrDir' is left in its last state (re-run of this script will reset it)."
  Fail "Please inspect the logs which contain path to the failing patch file (use -debugOutput to get all the details)."
  Fail "Once you make changes to the conflicting VMR patch, commit it locally and re-run this script."
  exit 1
}

207
eng/common/vmr-sync.sh

@ -0,0 +1,207 @@
#!/bin/bash
### This script is used for synchronizing the current repository into a local VMR.
### It pulls the current repository's code into the specified VMR directory for local testing or
### Source-Build validation.
###
### The tooling used for synchronization will clone the VMR repository into a temporary folder if
### it does not already exist. These clones can be reused in future synchronizations, so it is
### recommended to dedicate a folder for this to speed up re-runs.
###
### USAGE:
### Synchronize current repository into a local VMR:
### ./vmr-sync.sh --tmp "$HOME/repos/tmp" "$HOME/repos/dotnet"
###
### Options:
### -t, --tmp, --tmp-dir PATH
### Required. Path to the temporary folder where repositories will be cloned
###
### -b, --branch, --vmr-branch BRANCH_NAME
### Optional. Branch of the 'dotnet/dotnet' repo to synchronize. The VMR will be checked out to this branch
###
### --debug
### Optional. Turns on the most verbose logging for the VMR tooling
###
### --remote name:URI
### Optional. Additional remote to use during the synchronization
### This can be used to synchronize to a commit from a fork of the repository
### Example: 'runtime:https://github.com/yourfork/runtime'
###
### --azdev-pat
### Optional. Azure DevOps PAT to use for cloning private repositories.
###
### -v, --vmr, --vmr-dir PATH
### Optional. Path to the dotnet/dotnet repository. When null, gets cloned to the temporary folder
# Resolve the physical path of this script (following symlinks) so that
# print_help can read the '###' usage banner from the real file.
source="${BASH_SOURCE[0]}"

# resolve $source until the file is no longer a symlink
while [[ -h "$source" ]]; do
  scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
  source="$(readlink "$source")"
  # if $source was a relative symlink, we need to resolve it relative to the path where the
  # symlink file was located
  [[ $source != /* ]] && source="$scriptroot/$source"
done
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
# Prints the usage banner: the lines starting with '### ' at the top of this file,
# from the first such line through the first blank line, with the leading 4 bytes
# ('### ') stripped by cut.
function print_help () {
  sed -n '/^### /,/^$/p' "$source" | cut -b 5-
}
# Terminal colors; '|| true' keeps the script alive when tput is unavailable (e.g. no TTY).
COLOR_RED=$(tput setaf 1 2>/dev/null || true)
COLOR_CYAN=$(tput setaf 6 2>/dev/null || true)
COLOR_CLEAR=$(tput sgr0 2>/dev/null || true)
# Sentinel token: fail/highlight substitute any occurrence of it inside a message
# with their own color, so embedded "resets" re-colorize instead of clearing.
COLOR_RESET=uniquesearchablestring
FAILURE_PREFIX='> '
# Writes a red error line to stderr; occurrences of the COLOR_RESET sentinel in
# the message are re-colorized to red so the rest of the line stays colored.
function fail () {
  local text="${1//${COLOR_RESET}/${COLOR_RED}}"
  echo "${COLOR_RED}${FAILURE_PREFIX}${text}${COLOR_CLEAR}" >&2
}
# Writes a cyan status line to stdout; occurrences of the COLOR_RESET sentinel in
# the message are re-colorized to cyan.
function highlight () {
  local text="${1//${COLOR_RESET}/${COLOR_CYAN}}"
  echo "${COLOR_CYAN}${FAILURE_PREFIX}${text}${COLOR_CLEAR}"
}
# Defaults for all command-line options.
tmp_dir=''
vmr_dir=''
vmr_branch=''
additional_remotes=''
verbosity=verbose
azdev_pat=''
ci=false

# Parse arguments; option names are matched case-insensitively via the tr below.
while [[ $# -gt 0 ]]; do
  opt="$(echo "$1" | tr "[:upper:]" "[:lower:]")"
  case "$opt" in
    -t|--tmp|--tmp-dir)
      tmp_dir=$2
      shift
      ;;
    -v|--vmr|--vmr-dir)
      vmr_dir=$2
      shift
      ;;
    -b|--branch|--vmr-branch)
      vmr_branch=$2
      shift
      ;;
    --remote)
      # Accumulates space-separated "name:URI" pairs; forwarded to darc later.
      additional_remotes="$additional_remotes $2"
      shift
      ;;
    --azdev-pat)
      azdev_pat=$2
      shift
      ;;
    --ci)
      ci=true
      ;;
    -d|--debug)
      verbosity=debug
      ;;
    -h|--help)
      print_help
      exit 0
      ;;
    *)
      fail "Invalid argument: $1"
      print_help
      exit 1
      ;;
  esac

  shift
done
# Validation: the temp dir is the only required argument.
if [[ -z "$tmp_dir" ]]; then
  fail "Missing --tmp-dir argument. Please specify the path to the temporary folder where the repositories will be cloned"
  exit 1
fi

# Sanitize the input: default the VMR location to a 'dotnet' folder under the temp dir.
if [[ -z "$vmr_dir" ]]; then
  vmr_dir="$tmp_dir/dotnet"
fi

if [[ ! -d "$tmp_dir" ]]; then
  mkdir -p "$tmp_dir"
fi

if [[ "$verbosity" == "debug" ]]; then
  set -x
fi

# Prepare the VMR: clone it when missing, otherwise require a clean working tree
# (git diff --quiet signals dirtiness through its exit code) and check out the branch.
if [[ ! -d "$vmr_dir" ]]; then
  highlight "Cloning 'dotnet/dotnet' into $vmr_dir.."
  git clone https://github.com/dotnet/dotnet "$vmr_dir"

  if [[ -n "$vmr_branch" ]]; then
    git -C "$vmr_dir" switch -c "$vmr_branch"
  fi
else
  if ! git -C "$vmr_dir" diff --quiet; then
    fail "There are changes in the working tree of $vmr_dir. Please commit or stash your changes"
    exit 1
  fi

  if [[ -n "$vmr_branch" ]]; then
    highlight "Preparing $vmr_dir"
    git -C "$vmr_dir" checkout "$vmr_branch"
    git -C "$vmr_dir" pull
  fi
fi

set -e

# Prepare darc: source the Arcade helpers, install the .NET SDK and the darc tool.
# NOTE(review): the relative paths below assume the script runs from the repo root —
# confirm callers always do.
highlight 'Installing .NET, preparing the tooling..'
source "./eng/common/tools.sh"
InitializeDotNetCli true
GetDarc
dotnetDir=$( cd ./.dotnet/; pwd -P )
# NOTE(review): $dotnet appears unused below; darc is invoked via $darc_tool (set by GetDarc).
dotnet=$dotnetDir/dotnet

highlight "Starting the synchronization of VMR.."
set +e

# Rewrite the optional values into ready-to-splice darc arguments.
if [[ -n "$additional_remotes" ]]; then
  additional_remotes="--additional-remotes $additional_remotes"
fi

if [[ -n "$azdev_pat" ]]; then
  azdev_pat="--azdev-pat $azdev_pat"
fi

ci_arg=''
if [[ "$ci" == "true" ]]; then
  ci_arg="--ci"
fi

# Synchronize the VMR. The optional variables are deliberately unquoted so that
# empty ones disappear and multi-token ones split into separate arguments.
export DOTNET_ROOT="$dotnetDir"

"$darc_tool" vmr forwardflow \
  --tmp "$tmp_dir" \
  $azdev_pat \
  --$verbosity \
  $ci_arg \
  $additional_remotes \
  "$vmr_dir"

if [[ $? == 0 ]]; then
  highlight "Synchronization succeeded"
else
  fail "Synchronization of repo to VMR failed!"
  fail "'$vmr_dir' is left in its last state (re-run of this script will reset it)."
  fail "Please inspect the logs which contain path to the failing patch file (use --debug to get all the details)."
  fail "Once you make changes to the conflicting VMR patch, commit it locally and re-run this script."
  exit 1
fi

4
global.json

@ -17,8 +17,8 @@
}, },
"msbuild-sdks": { "msbuild-sdks": {
"Microsoft.DotNet.Arcade.Sdk": "9.0.0-beta.25058.5", "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25562.108",
"Microsoft.DotNet.Helix.Sdk": "9.0.0-beta.25058.5", "Microsoft.DotNet.Helix.Sdk": "10.0.0-beta.25562.108",
"MSBuild.Sdk.Extras": "3.0.44", "MSBuild.Sdk.Extras": "3.0.44",
"MSBuild.SDK.SystemWeb": "4.0.97" "MSBuild.SDK.SystemWeb": "4.0.97"
} }

2
test/OpenIddict.Abstractions.Tests/OpenIddict.Abstractions.Tests.csproj

@ -1,7 +1,7 @@
<Project Sdk="Microsoft.NET.Sdk"> <Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup> <PropertyGroup>
<TargetFrameworks>$(NetFrameworkTargetFrameworks);$(NetCoreTargetFrameworks)</TargetFrameworks> <TargetFrameworks>net472;net48;$(NetCoreTargetFrameworks)</TargetFrameworks>
</PropertyGroup> </PropertyGroup>
<ItemGroup> <ItemGroup>

2
test/OpenIddict.Client.AspNetCore.IntegrationTests/OpenIddict.Client.AspNetCore.IntegrationTests.csproj

@ -1,7 +1,7 @@
<Project Sdk="Microsoft.NET.Sdk"> <Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup> <PropertyGroup>
<TargetFrameworks>$(NetFrameworkTargetFrameworks);$(NetCoreTargetFrameworks)</TargetFrameworks> <TargetFrameworks>net472;net48;$(NetCoreTargetFrameworks)</TargetFrameworks>
</PropertyGroup> </PropertyGroup>
<ItemGroup> <ItemGroup>

2
test/OpenIddict.Client.IntegrationTests/OpenIddict.Client.IntegrationTests.csproj

@ -1,7 +1,7 @@
<Project Sdk="Microsoft.NET.Sdk"> <Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup> <PropertyGroup>
<TargetFrameworks>$(NetFrameworkTargetFrameworks);$(NetCoreTargetFrameworks)</TargetFrameworks> <TargetFrameworks>net472;net48;$(NetCoreTargetFrameworks)</TargetFrameworks>
</PropertyGroup> </PropertyGroup>
<ItemGroup> <ItemGroup>

2
test/OpenIddict.Client.Owin.IntegrationTests/OpenIddict.Client.Owin.IntegrationTests.csproj

@ -1,7 +1,7 @@
<Project Sdk="Microsoft.NET.Sdk"> <Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup> <PropertyGroup>
<TargetFrameworks>$(NetFrameworkTargetFrameworks)</TargetFrameworks> <TargetFrameworks>net472;net48</TargetFrameworks>
</PropertyGroup> </PropertyGroup>
<ItemGroup> <ItemGroup>

2
test/OpenIddict.Core.Tests/OpenIddict.Core.Tests.csproj

@ -1,7 +1,7 @@
<Project Sdk="Microsoft.NET.Sdk"> <Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup> <PropertyGroup>
<TargetFrameworks>$(NetFrameworkTargetFrameworks);$(NetCoreTargetFrameworks)</TargetFrameworks> <TargetFrameworks>net472;net48;$(NetCoreTargetFrameworks)</TargetFrameworks>
</PropertyGroup> </PropertyGroup>
<ItemGroup> <ItemGroup>

2
test/OpenIddict.EntityFramework.Tests/OpenIddict.EntityFramework.Tests.csproj

@ -1,7 +1,7 @@
<Project Sdk="Microsoft.NET.Sdk"> <Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup> <PropertyGroup>
<TargetFrameworks>$(NetFrameworkTargetFrameworks);$(NetCoreTargetFrameworks)</TargetFrameworks> <TargetFrameworks>net472;net48;$(NetCoreTargetFrameworks)</TargetFrameworks>
</PropertyGroup> </PropertyGroup>
<ItemGroup> <ItemGroup>

2
test/OpenIddict.EntityFrameworkCore.Tests/OpenIddict.EntityFrameworkCore.Tests.csproj

@ -1,7 +1,7 @@
<Project Sdk="Microsoft.NET.Sdk"> <Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup> <PropertyGroup>
<TargetFrameworks>$(NetFrameworkTargetFrameworks);$(NetCoreTargetFrameworks)</TargetFrameworks> <TargetFrameworks>net472;net48;$(NetCoreTargetFrameworks)</TargetFrameworks>
</PropertyGroup> </PropertyGroup>
<ItemGroup> <ItemGroup>

2
test/OpenIddict.Quartz.Tests/OpenIddict.Quartz.Tests.csproj

@ -1,7 +1,7 @@
<Project Sdk="Microsoft.NET.Sdk"> <Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup> <PropertyGroup>
<TargetFrameworks>$(NetFrameworkTargetFrameworks);$(NetCoreTargetFrameworks)</TargetFrameworks> <TargetFrameworks>net472;net48;$(NetCoreTargetFrameworks)</TargetFrameworks>
</PropertyGroup> </PropertyGroup>
<ItemGroup> <ItemGroup>

2
test/OpenIddict.Server.AspNetCore.IntegrationTests/OpenIddict.Server.AspNetCore.IntegrationTests.csproj

@ -1,7 +1,7 @@
<Project Sdk="Microsoft.NET.Sdk"> <Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup> <PropertyGroup>
<TargetFrameworks>$(NetFrameworkTargetFrameworks);$(NetCoreTargetFrameworks)</TargetFrameworks> <TargetFrameworks>net472;net48;$(NetCoreTargetFrameworks)</TargetFrameworks>
</PropertyGroup> </PropertyGroup>
<ItemGroup> <ItemGroup>

2
test/OpenIddict.Server.DataProtection.Tests/OpenIddict.Server.DataProtection.Tests.csproj

@ -1,7 +1,7 @@
<Project Sdk="Microsoft.NET.Sdk"> <Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup> <PropertyGroup>
<TargetFrameworks>$(NetFrameworkTargetFrameworks);$(NetCoreTargetFrameworks)</TargetFrameworks> <TargetFrameworks>net472;net48;$(NetCoreTargetFrameworks)</TargetFrameworks>
</PropertyGroup> </PropertyGroup>
<ItemGroup> <ItemGroup>

2
test/OpenIddict.Server.IntegrationTests/OpenIddict.Server.IntegrationTests.csproj

@ -1,7 +1,7 @@
<Project Sdk="Microsoft.NET.Sdk"> <Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup> <PropertyGroup>
<TargetFrameworks>$(NetFrameworkTargetFrameworks);$(NetCoreTargetFrameworks)</TargetFrameworks> <TargetFrameworks>net472;net48;$(NetCoreTargetFrameworks)</TargetFrameworks>
</PropertyGroup> </PropertyGroup>
<ItemGroup> <ItemGroup>

2
test/OpenIddict.Server.Owin.IntegrationTests/OpenIddict.Server.Owin.IntegrationTests.csproj

@ -1,7 +1,7 @@
<Project Sdk="Microsoft.NET.Sdk"> <Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup> <PropertyGroup>
<TargetFrameworks>$(NetFrameworkTargetFrameworks)</TargetFrameworks> <TargetFrameworks>net472;net48</TargetFrameworks>
</PropertyGroup> </PropertyGroup>
<ItemGroup> <ItemGroup>

2
test/OpenIddict.Server.Tests/OpenIddict.Server.Tests.csproj

@ -1,7 +1,7 @@
<Project Sdk="Microsoft.NET.Sdk"> <Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup> <PropertyGroup>
<TargetFrameworks>$(NetFrameworkTargetFrameworks);$(NetCoreTargetFrameworks)</TargetFrameworks> <TargetFrameworks>net472;net48;$(NetCoreTargetFrameworks)</TargetFrameworks>
</PropertyGroup> </PropertyGroup>
<ItemGroup> <ItemGroup>

2
test/OpenIddict.Validation.AspNetCore.IntegrationTests/OpenIddict.Validation.AspNetCore.IntegrationTests.csproj

@ -1,7 +1,7 @@
<Project Sdk="Microsoft.NET.Sdk"> <Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup> <PropertyGroup>
<TargetFrameworks>$(NetFrameworkTargetFrameworks);$(NetCoreTargetFrameworks)</TargetFrameworks> <TargetFrameworks>net472;net48;$(NetCoreTargetFrameworks)</TargetFrameworks>
</PropertyGroup> </PropertyGroup>
<ItemGroup> <ItemGroup>

2
test/OpenIddict.Validation.IntegrationTests/OpenIddict.Validation.IntegrationTests.csproj

@ -1,7 +1,7 @@
<Project Sdk="Microsoft.NET.Sdk"> <Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup> <PropertyGroup>
<TargetFrameworks>$(NetFrameworkTargetFrameworks);$(NetCoreTargetFrameworks)</TargetFrameworks> <TargetFrameworks>net472;net48;$(NetCoreTargetFrameworks)</TargetFrameworks>
</PropertyGroup> </PropertyGroup>
<ItemGroup> <ItemGroup>

2
test/OpenIddict.Validation.Owin.IntegrationTests/OpenIddict.Validation.Owin.IntegrationTests.csproj

@ -1,7 +1,7 @@
<Project Sdk="Microsoft.NET.Sdk"> <Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup> <PropertyGroup>
<TargetFrameworks>$(NetFrameworkTargetFrameworks)</TargetFrameworks> <TargetFrameworks>net472;net48</TargetFrameworks>
</PropertyGroup> </PropertyGroup>
<ItemGroup> <ItemGroup>

Loading…
Cancel
Save