
Use NuGet.exe instead of dotnet nuget push and update .NET Arcade

pull/1364/head
Kévin Chalet 4 years ago
commit a12b8d41b1
24 changed files:

  1. .github/workflows/build.yml (10)
  2. eng/Version.Details.xml (8)
  3. eng/common/SetupNugetSources.ps1 (6)
  4. eng/common/SetupNugetSources.sh (24)
  5. eng/common/build.sh (4)
  6. eng/common/cross/build-rootfs.sh (42)
  7. eng/common/cross/toolchain.cmake (4)
  8. eng/common/init-tools-native.sh (2)
  9. eng/common/internal/Tools.csproj (3)
  10. eng/common/native/common-library.sh (8)
  11. eng/common/post-build/sourcelink-validation.ps1 (25)
  12. eng/common/sdk-task.ps1 (3)
  13. eng/common/sdl/configure-sdl-tool.ps1 (109)
  14. eng/common/sdl/execute-all-sdl-tools.ps1 (71)
  15. eng/common/sdl/extract-artifact-archives.ps1 (63)
  16. eng/common/sdl/run-sdl.ps1 (50)
  17. eng/common/templates/job/execute-sdl.yml (108)
  18. eng/common/templates/job/job.yml (2)
  19. eng/common/templates/job/publish-build-assets.yml (26)
  20. eng/common/templates/job/source-index-stage1.yml (21)
  21. eng/common/templates/steps/source-build.yml (31)
  22. eng/common/tools.ps1 (108)
  23. eng/common/tools.sh (20)
  24. global.json (8)

10
.github/workflows/build.yml

@@ -28,8 +28,10 @@ jobs:
- name: Checkout code
uses: actions/checkout@v2
- name: Setup .NET Core SDK
uses: actions/setup-dotnet@v1
- name: Setup NuGet
uses: nuget/setup-nuget@v1
with:
nuget-version: '5.11.0'
# Arcade only allows the revision to contain up to two characters, and GitHub Actions does not roll-over
# build numbers every day like Azure DevOps does. To balance these two requirements, set the official
@@ -87,8 +89,8 @@ jobs:
- name: Push NuGet packages to MyGet.org
if: ${{ github.repository_owner == 'openiddict' && (github.ref == 'refs/heads/dev' || startsWith(github.ref, 'refs/heads/rel/') || startsWith(github.ref, 'refs/tags/')) && runner.os == 'Windows' }}
run: dotnet nuget push "artifacts\packages\Release\Shipping\*.nupkg" --api-key ${{ secrets.MYGET_API_KEY }} --skip-duplicate --source https://www.myget.org/F/openiddict/api/v3/index.json
run: nuget push "artifacts\packages\Release\Shipping\*.nupkg" -ApiKey ${{ secrets.MYGET_API_KEY }} -SkipDuplicate -Source https://www.myget.org/F/openiddict/api/v3/index.json
- name: Push NuGet packages to NuGet.org
if: ${{ github.repository_owner == 'openiddict' && startsWith(github.ref, 'refs/tags/') && runner.os == 'Windows' }}
run: dotnet nuget push "artifacts\packages\Release\Shipping\*.nupkg" --api-key ${{ secrets.NUGET_API_KEY }} --skip-duplicate --source https://api.nuget.org/v3/index.json
run: nuget push "artifacts\packages\Release\Shipping\*.nupkg" -ApiKey ${{ secrets.NUGET_API_KEY }} -SkipDuplicate -Source https://api.nuget.org/v3/index.json
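For reference, the change swaps the dotnet CLI push for its NuGet.exe equivalent; the switches map one-to-one. A minimal sketch of the two forms, with a placeholder $apiKey standing in for the workflow secrets:
# dotnet CLI form (previous):
dotnet nuget push "artifacts\packages\Release\Shipping\*.nupkg" --api-key $apiKey --skip-duplicate --source https://api.nuget.org/v3/index.json
# NuGet.exe form (new): same behavior, PascalCase switches.
nuget push "artifacts\packages\Release\Shipping\*.nupkg" -ApiKey $apiKey -SkipDuplicate -Source https://api.nuget.org/v3/index.json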

8
eng/Version.Details.xml

@@ -5,14 +5,14 @@
</ProductDependencies>
<ToolsetDependencies>
<Dependency Name="Microsoft.DotNet.Arcade.Sdk" Version="6.0.0-beta.21316.1">
<Dependency Name="Microsoft.DotNet.Arcade.Sdk" Version="6.0.0-beta.21519.3">
<Uri>https://github.com/dotnet/arcade</Uri>
<Sha>bdcc258d0d4a547be51703795a140fa6b1f68c57</Sha>
<Sha>85f3aa16d8797b5020f1fda11df1a958feb5f8df</Sha>
</Dependency>
<Dependency Name="Microsoft.DotNet.Helix.Sdk" Version="6.0.0-beta.21316.1">
<Dependency Name="Microsoft.DotNet.Helix.Sdk" Version="6.0.0-beta.21519.3">
<Uri>https://github.com/dotnet/arcade</Uri>
<Sha>bdcc258d0d4a547be51703795a140fa6b1f68c57</Sha>
<Sha>85f3aa16d8797b5020f1fda11df1a958feb5f8df</Sha>
</Dependency>
</ToolsetDependencies>

6
eng/common/SetupNugetSources.ps1

@@ -158,4 +158,10 @@ if ($dotnet5Source -ne $null) {
AddPackageSource -Sources $sources -SourceName "dotnet5-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet5-internal-transport/nuget/v2" -Creds $creds -Username $userName -Password $Password
}
$dotnet6Source = $sources.SelectSingleNode("add[@key='dotnet6']")
if ($dotnet6Source -ne $null) {
AddPackageSource -Sources $sources -SourceName "dotnet6-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet6-internal/nuget/v2" -Creds $creds -Username $userName -Password $Password
AddPackageSource -Sources $sources -SourceName "dotnet6-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet6-internal-transport/nuget/v2" -Creds $creds -Username $userName -Password $Password
}
$doc.Save($filename)

24
eng/common/SetupNugetSources.sh

@@ -129,6 +129,30 @@ if [ "$?" == "0" ]; then
PackageSources+=('dotnet5-internal-transport')
fi
# Ensure dotnet6-internal and dotnet6-internal-transport are in the packageSources if the public dotnet6 feeds are present
grep -i "<add key=\"dotnet6\"" $ConfigFile
if [ "$?" == "0" ]; then
grep -i "<add key=\"dotnet6-internal\"" $ConfigFile
if [ "$?" != "0" ]; then
echo "Adding dotnet6-internal to the packageSources."
PackageSourcesNodeFooter="</packageSources>"
PackageSourceTemplate="${TB}<add key=\"dotnet6-internal\" value=\"https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet6-internal/nuget/v2\" />"
sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile
fi
PackageSources+=('dotnet6-internal')
grep -i "<add key=\"dotnet6-internal-transport\">" $ConfigFile
if [ "$?" != "0" ]; then
echo "Adding dotnet6-internal-transport to the packageSources."
PackageSourcesNodeFooter="</packageSources>"
PackageSourceTemplate="${TB}<add key=\"dotnet6-internal-transport\" value=\"https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet6-internal-transport/nuget/v2\" />"
sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile
fi
PackageSources+=('dotnet6-internal-transport')
fi
# I want things split line by line
PrevIFS=$IFS
IFS=$'\n'

4
eng/common/build.sh

@@ -187,6 +187,10 @@ function InitializeCustomToolset {
}
function Build {
if [[ "$ci" == true ]]; then
TryLogClientIpAddress
fi
InitializeToolset
InitializeCustomToolset

42
eng/common/cross/build-rootfs.sh

@@ -6,10 +6,10 @@ usage()
{
echo "Usage: $0 [BuildArch] [CodeName] [lldbx.y] [--skipunmount] --rootfsdir <directory>]"
echo "BuildArch can be: arm(default), armel, arm64, x86"
echo "CodeName - optional, Code name for Linux, can be: trusty, xenial(default), zesty, bionic, alpine, alpine3.9 or alpine3.13. If BuildArch is armel, LinuxCodeName is jessie(default) or tizen."
echo " for FreeBSD can be: freebsd11 or freebsd12."
echo "CodeName - optional, Code name for Linux, can be: xenial(default), zesty, bionic, alpine, alpine3.9 or alpine3.13. If BuildArch is armel, LinuxCodeName is jessie(default) or tizen."
echo " for FreeBSD can be: freebsd11, freebsd12, freebsd13"
echo " for illumos can be: illumos."
echo "lldbx.y - optional, LLDB version, can be: lldb3.9(default), lldb4.0, lldb5.0, lldb6.0 no-lldb. Ignored for alpine and FReeBSD"
echo "lldbx.y - optional, LLDB version, can be: lldb3.9(default), lldb4.0, lldb5.0, lldb6.0 no-lldb. Ignored for alpine and FreeBSD"
echo "--skipunmount - optional, will skip the unmount of rootfs folder."
echo "--use-mirror - optional, use mirror URL to fetch resources, when available."
exit 1
@@ -33,7 +33,6 @@ __AlpinePackages="alpine-base"
__AlpinePackages+=" build-base"
__AlpinePackages+=" linux-headers"
__AlpinePackagesEdgeCommunity=" lldb-dev"
__AlpinePackagesEdgeMain=" llvm10-libs"
__AlpinePackagesEdgeMain+=" python3"
__AlpinePackagesEdgeMain+=" libedit"
@@ -61,13 +60,15 @@ __AlpinePackages+=" krb5-dev"
__AlpinePackages+=" openssl-dev"
__AlpinePackages+=" zlib-dev"
__FreeBSDBase="12.1-RELEASE"
__FreeBSDBase="12.2-RELEASE"
__FreeBSDPkg="1.12.0"
__FreeBSDABI="12"
__FreeBSDPackages="libunwind"
__FreeBSDPackages+=" icu"
__FreeBSDPackages+=" libinotify"
__FreeBSDPackages+=" lttng-ust"
__FreeBSDPackages+=" krb5"
__FreeBSDPackages+=" terminfo-db"
__IllumosPackages="icu-64.2nb2"
__IllumosPackages+=" mit-krb5-1.16.2nb4"
@@ -115,6 +116,8 @@ while :; do
__UbuntuArch=s390x
__UbuntuRepo="http://ports.ubuntu.com/ubuntu-ports/"
__UbuntuPackages=$(echo ${__UbuntuPackages} | sed 's/ libunwind8-dev//')
__UbuntuPackages=$(echo ${__UbuntuPackages} | sed 's/ libomp-dev//')
__UbuntuPackages=$(echo ${__UbuntuPackages} | sed 's/ libomp5//')
unset __LLDB_Package
;;
x86)
@@ -143,11 +146,6 @@ while :; do
no-lldb)
unset __LLDB_Package
;;
trusty) # Ubuntu 14.04
if [ "$__CodeName" != "jessie" ]; then
__CodeName=trusty
fi
;;
xenial) # Ubuntu 16.04
if [ "$__CodeName" != "jessie" ]; then
__CodeName=xenial
@@ -191,6 +189,8 @@ while :; do
__CodeName=alpine
__UbuntuRepo=
__AlpineVersion=3.9
__AlpinePackagesEdgeMain+=" llvm11-libs"
__AlpinePackagesEdgeMain+=" clang-libs"
;;
alpine3.13)
__CodeName=alpine
@@ -201,15 +201,24 @@ while :; do
__AlpinePackagesEdgeCommunity=
__AlpinePackages+=$__AlpinePackagesEdgeMain
__AlpinePackagesEdgeMain=
__AlpinePackages+=" llvm10-libs"
;;
freebsd11)
__FreeBSDBase="11.3-RELEASE"
__FreeBSDABI="11"
;&
freebsd12)
__CodeName=freebsd
__BuildArch=x64
__SkipUnmount=1
;;
freebsd13)
__CodeName=freebsd
__FreeBSDBase="13.0-RELEASE"
__FreeBSDABI="13"
__BuildArch=x64
__SkipUnmount=1
;;
illumos)
__CodeName=illumos
__BuildArch=x64
@@ -287,9 +296,9 @@ if [[ "$__CodeName" == "alpine" ]]; then
rm -r $__ApkToolsDir
elif [[ "$__CodeName" == "freebsd" ]]; then
mkdir -p $__RootfsDir/usr/local/etc
JOBS="$(getconf _NPROCESSORS_ONLN)"
wget -O - https://download.freebsd.org/ftp/releases/amd64/${__FreeBSDBase}/base.txz | tar -C $__RootfsDir -Jxf - ./lib ./usr/lib ./usr/libdata ./usr/include ./usr/share/keys ./etc ./bin/freebsd-version
# For now, ask for 11 ABI even on 12. This can be revisited later.
echo "ABI = \"FreeBSD:11:amd64\"; FINGERPRINTS = \"${__RootfsDir}/usr/share/keys\"; REPOS_DIR = [\"${__RootfsDir}/etc/pkg\"]; REPO_AUTOUPDATE = NO; RUN_SCRIPTS = NO;" > ${__RootfsDir}/usr/local/etc/pkg.conf
echo "ABI = \"FreeBSD:${__FreeBSDABI}:amd64\"; FINGERPRINTS = \"${__RootfsDir}/usr/share/keys\"; REPOS_DIR = [\"${__RootfsDir}/etc/pkg\"]; REPO_AUTOUPDATE = NO; RUN_SCRIPTS = NO;" > ${__RootfsDir}/usr/local/etc/pkg.conf
echo "FreeBSD: { url: "pkg+http://pkg.FreeBSD.org/\${ABI}/quarterly", mirror_type: \"srv\", signature_type: \"fingerprints\", fingerprints: \"${__RootfsDir}/usr/share/keys/pkg\", enabled: yes }" > ${__RootfsDir}/etc/pkg/FreeBSD.conf
mkdir -p $__RootfsDir/tmp
# get and build package manager
@@ -297,7 +306,7 @@ elif [[ "$__CodeName" == "freebsd" ]]; then
cd $__RootfsDir/tmp/pkg-${__FreeBSDPkg}
# needed for install to succeed
mkdir -p $__RootfsDir/host/etc
./autogen.sh && ./configure --prefix=$__RootfsDir/host && make && make install
./autogen.sh && ./configure --prefix=$__RootfsDir/host && make -j "$JOBS" && make install
rm -rf $__RootfsDir/tmp/pkg-${__FreeBSDPkg}
# install packages we need.
INSTALL_AS_USER=$(whoami) $__RootfsDir/host/sbin/pkg -r $__RootfsDir -C $__RootfsDir/usr/local/etc/pkg.conf update
@@ -360,13 +369,6 @@ elif [[ -n $__CodeName ]]; then
umount $__RootfsDir/* || true
fi
if [[ "$__BuildArch" == "arm" && "$__CodeName" == "trusty" ]]; then
pushd $__RootfsDir
patch -p1 < $__CrossDir/$__BuildArch/trusty.patch
patch -p1 < $__CrossDir/$__BuildArch/trusty-lttng-2.4.patch
popd
fi
if [[ "$__BuildArch" == "armel" && "$__CodeName" == "jessie" ]]; then
pushd $__RootfsDir
patch -p1 < $__CrossDir/$__BuildArch/armel.jessie.patch

4
eng/common/cross/toolchain.cmake

@@ -138,8 +138,8 @@ function(add_toolchain_linker_flag Flag)
if (NOT Config STREQUAL "")
set(CONFIG_SUFFIX "_${Config}")
endif()
set("CMAKE_EXE_LINKER_FLAGS${CONFIG_SUFFIX}" "${CMAKE_EXE_LINKER_FLAGS${CONFIG_SUFFIX}} ${Flag}" PARENT_SCOPE)
set("CMAKE_SHARED_LINKER_FLAGS${CONFIG_SUFFIX}" "${CMAKE_SHARED_LINKER_FLAGS${CONFIG_SUFFIX}} ${Flag}" PARENT_SCOPE)
set("CMAKE_EXE_LINKER_FLAGS${CONFIG_SUFFIX}_INIT" "${CMAKE_EXE_LINKER_FLAGS${CONFIG_SUFFIX}_INIT} ${Flag}" PARENT_SCOPE)
set("CMAKE_SHARED_LINKER_FLAGS${CONFIG_SUFFIX}_INIT" "${CMAKE_SHARED_LINKER_FLAGS${CONFIG_SUFFIX}_INIT} ${Flag}" PARENT_SCOPE)
endfunction()
if(CMAKE_SYSTEM_NAME STREQUAL "Linux")

2
eng/common/init-tools-native.sh

@@ -10,7 +10,7 @@ force=false
download_retries=5
retry_wait_time_seconds=30
global_json_file="$(dirname "$(dirname "${scriptroot}")")/global.json"
declare -A native_assets
declare -a native_assets
. $scriptroot/pipeline-logging-functions.sh
. $scriptroot/native/common-library.sh

3
eng/common/internal/Tools.csproj

@@ -1,5 +1,4 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. -->
<!-- Licensed to the .NET Foundation under one or more agreements. The .NET Foundation licenses this file to you under the MIT license. -->
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net472</TargetFramework>

8
eng/common/native/common-library.sh

@@ -148,8 +148,12 @@ function NewScriptShim {
fi
if [[ ! -f $tool_file_path ]]; then
Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Specified tool file path:'$tool_file_path' does not exist"
return 1
# try to see if the path is lower cased
tool_file_path="$(echo $tool_file_path | tr "[:upper:]" "[:lower:]")"
if [[ ! -f $tool_file_path ]]; then
Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Specified tool file path:'$tool_file_path' does not exist"
return 1
fi
fi
local shim_contents=$'#!/usr/bin/env bash\n'

25
eng/common/post-build/sourcelink-validation.ps1

@@ -17,6 +17,7 @@ $global:RepoFiles = @{}
$MaxParallelJobs = 16
$MaxRetries = 5
$RetryWaitTimeInSeconds = 30
# Wait time between check for system load
$SecondsBetweenLoadChecks = 10
@@ -99,21 +100,25 @@ $ValidatePackage = {
$Status = 200
$Cache = $using:RepoFiles
$totalRetries = 0
$attempts = 0
while ($totalRetries -lt $using:MaxRetries) {
while ($attempts -lt $using:MaxRetries) {
if ( !($Cache.ContainsKey($FilePath)) ) {
try {
$Uri = $Link -as [System.URI]
# Only GitHub links are valid
if ($Uri.AbsoluteURI -ne $null -and ($Uri.Host -match 'github' -or $Uri.Host -match 'githubusercontent')) {
if ($Link -match "submodules") {
# Skip submodule links until sourcelink properly handles submodules
$Status = 200
}
elseif ($Uri.AbsoluteURI -ne $null -and ($Uri.Host -match 'github' -or $Uri.Host -match 'githubusercontent')) {
# Only GitHub links are valid
$Status = (Invoke-WebRequest -Uri $Link -UseBasicParsing -Method HEAD -TimeoutSec 5).StatusCode
}
else {
# If it's not a github link, we want to break out of the loop and not retry.
$Status = 0
$totalRetries = $using:MaxRetries
$attempts = $using:MaxRetries
}
}
catch {
@@ -123,9 +128,15 @@ $ValidatePackage = {
}
if ($Status -ne 200) {
$totalRetries++
$attempts++
if ($totalRetries -ge $using:MaxRetries) {
if ($attempts -lt $using:MaxRetries)
{
$attemptsLeft = $using:MaxRetries - $attempts
Write-Warning "Download failed, $attemptsLeft attempts remaining, will retry in $using:RetryWaitTimeInSeconds seconds"
Start-Sleep -Seconds $using:RetryWaitTimeInSeconds
}
else {
if ($NumFailedLinks -eq 0) {
if ($FailedFiles.Value -eq 0) {
Write-Host

3
eng/common/sdk-task.ps1

@@ -83,6 +83,9 @@ try {
}
if ($restore) {
if ($ci) {
Try-LogClientIpAddress
}
Build 'Restore'
}

109
eng/common/sdl/configure-sdl-tool.ps1

@@ -0,0 +1,109 @@
Param(
[string] $GuardianCliLocation,
[string] $WorkingDirectory,
[string] $TargetDirectory,
[string] $GdnFolder,
# The list of Guardian tools to configure. For each object in the array:
# - If the item is a [hashtable], it must contain these entries:
# - Name = The tool name as Guardian knows it.
# - Scenario = (Optional) Scenario-specific name for this configuration entry. It must be unique
# among all tool entries with the same Name.
# - Args = (Optional) Array of Guardian tool configuration args, like '@("Target > C:\temp")'
# - If the item is a [string] $v, it is treated as '@{ Name="$v" }'
[object[]] $ToolsList,
[string] $GuardianLoggerLevel='Standard',
# Optional: Additional params to add to any tool using CredScan.
[string[]] $CrScanAdditionalRunConfigParams,
# Optional: Additional params to add to any tool using PoliCheck.
[string[]] $PoliCheckAdditionalRunConfigParams
)
$ErrorActionPreference = 'Stop'
Set-StrictMode -Version 2.0
$disableConfigureToolsetImport = $true
$global:LASTEXITCODE = 0
try {
# `tools.ps1` checks $ci to perform some actions. Since the SDL
# scripts don't necessarily execute in the same agent that run the
# build.ps1/sh script this variable isn't automatically set.
$ci = $true
. $PSScriptRoot\..\tools.ps1
# Normalize tools list: all in [hashtable] form with defined values for each key.
$ToolsList = $ToolsList |
ForEach-Object {
if ($_ -is [string]) {
$_ = @{ Name = $_ }
}
if (-not ($_['Scenario'])) { $_.Scenario = "" }
if (-not ($_['Args'])) { $_.Args = @() }
$_
}
Write-Host "List of tools to configure:"
$ToolsList | ForEach-Object { $_ | Out-String | Write-Host }
# We store config files in the r directory of .gdn
$gdnConfigPath = Join-Path $GdnFolder 'r'
$ValidPath = Test-Path $GuardianCliLocation
if ($ValidPath -eq $False)
{
Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Invalid Guardian CLI Location."
ExitWithExitCode 1
}
foreach ($tool in $ToolsList) {
# Put together the name and scenario to make a unique key.
$toolConfigName = $tool.Name
if ($tool.Scenario) {
$toolConfigName += "_" + $tool.Scenario
}
Write-Host "=== Configuring $toolConfigName..."
$gdnConfigFile = Join-Path $gdnConfigPath "$toolConfigName-configure.gdnconfig"
# For some tools, add default and automatic args.
if ($tool.Name -eq 'credscan') {
if ($targetDirectory) {
$tool.Args += "TargetDirectory < $TargetDirectory"
}
$tool.Args += "OutputType < pre"
$tool.Args += $CrScanAdditionalRunConfigParams
} elseif ($tool.Name -eq 'policheck') {
if ($targetDirectory) {
$tool.Args += "Target < $TargetDirectory"
}
$tool.Args += $PoliCheckAdditionalRunConfigParams
}
# Create variable pointing to the args array directly so we can use splat syntax later.
$toolArgs = $tool.Args
# Configure the tool. If args array is provided or the current tool has some default arguments
# defined, add "--args" and splat each element on the end. Arg format is "{Arg id} < {Value}",
# one per parameter. Doc page for "guardian configure":
# https://dev.azure.com/securitytools/SecurityIntegration/_wiki/wikis/Guardian/1395/configure
Exec-BlockVerbosely {
& $GuardianCliLocation configure `
--working-directory $WorkingDirectory `
--tool $tool.Name `
--output-path $gdnConfigFile `
--logger-level $GuardianLoggerLevel `
--noninteractive `
--force `
$(if ($toolArgs) { "--args" }) @toolArgs
Exit-IfNZEC "Sdl"
}
Write-Host "Created '$toolConfigName' configuration file: $gdnConfigFile"
}
}
catch {
Write-Host $_.ScriptStackTrace
Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
ExitWithExitCode 1
}
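A minimal sketch of a -ToolsList value in the shape documented above; the tool names, scenario, and args are illustrative only, and the other script parameters are placeholders:
# Strings and hashtables can be mixed; strings are normalized to @{ Name = '<value>' }.
$tools = @(
    'credscan'
    @{ Name = 'policheck'; Scenario = 'source'; Args = @('Target < C:\src') }
)
& .\configure-sdl-tool.ps1 `
    -GuardianCliLocation $guardianCliLocation `
    -WorkingDirectory $workingDirectory `
    -GdnFolder (Join-Path $workingDirectory '.gdn') `
    -ToolsList $tools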

71
eng/common/sdl/execute-all-sdl-tools.ps1

@@ -7,8 +7,17 @@ Param(
[string] $SourceDirectory=$env:BUILD_SOURCESDIRECTORY, # Required: the directory where source files are located
[string] $ArtifactsDirectory = (Join-Path $env:BUILD_ARTIFACTSTAGINGDIRECTORY ('artifacts')), # Required: the directory where build artifacts are located
[string] $AzureDevOpsAccessToken, # Required: access token for dnceng; should be provided via KeyVault
[string[]] $SourceToolsList, # Optional: list of SDL tools to run on source code
[string[]] $ArtifactToolsList, # Optional: list of SDL tools to run on built artifacts
# Optional: list of SDL tools to run on source code. See 'configure-sdl-tool.ps1' for tools list
# format.
[object[]] $SourceToolsList,
# Optional: list of SDL tools to run on built artifacts. See 'configure-sdl-tool.ps1' for tools
# list format.
[object[]] $ArtifactToolsList,
# Optional: list of SDL tools to run without automatically specifying a target directory. See
# 'configure-sdl-tool.ps1' for tools list format.
[object[]] $CustomToolsList,
[bool] $TsaPublish=$False, # Optional: true will publish results to TSA; only set to true after onboarding to TSA; TSA is the automated framework used to upload test results as bugs.
[string] $TsaBranchName=$env:BUILD_SOURCEBRANCH, # Optional: required for TSA publish; defaults to $(Build.SourceBranchName); TSA is the automated framework used to upload test results as bugs.
[string] $TsaRepositoryName=$env:BUILD_REPOSITORY_NAME, # Optional: TSA repository name; will be generated automatically if not submitted; TSA is the automated framework used to upload test results as bugs.
@@ -63,13 +72,16 @@ try {
ExitWithExitCode 1
}
& $(Join-Path $PSScriptRoot 'init-sdl.ps1') -GuardianCliLocation $guardianCliLocation -Repository $RepoName -BranchName $BranchName -WorkingDirectory $workingDirectory -AzureDevOpsAccessToken $AzureDevOpsAccessToken -GuardianLoggerLevel $GuardianLoggerLevel
Exec-BlockVerbosely {
& $(Join-Path $PSScriptRoot 'init-sdl.ps1') -GuardianCliLocation $guardianCliLocation -Repository $RepoName -BranchName $BranchName -WorkingDirectory $workingDirectory -AzureDevOpsAccessToken $AzureDevOpsAccessToken -GuardianLoggerLevel $GuardianLoggerLevel
}
$gdnFolder = Join-Path $workingDirectory '.gdn'
if ($TsaOnboard) {
if ($TsaCodebaseName -and $TsaNotificationEmail -and $TsaCodebaseAdmin -and $TsaBugAreaPath) {
Write-Host "$guardianCliLocation tsa-onboard --codebase-name `"$TsaCodebaseName`" --notification-alias `"$TsaNotificationEmail`" --codebase-admin `"$TsaCodebaseAdmin`" --instance-url `"$TsaInstanceUrl`" --project-name `"$TsaProjectName`" --area-path `"$TsaBugAreaPath`" --iteration-path `"$TsaIterationPath`" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel"
& $guardianCliLocation tsa-onboard --codebase-name "$TsaCodebaseName" --notification-alias "$TsaNotificationEmail" --codebase-admin "$TsaCodebaseAdmin" --instance-url "$TsaInstanceUrl" --project-name "$TsaProjectName" --area-path "$TsaBugAreaPath" --iteration-path "$TsaIterationPath" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel
Exec-BlockVerbosely {
& $guardianCliLocation tsa-onboard --codebase-name "$TsaCodebaseName" --notification-alias "$TsaNotificationEmail" --codebase-admin "$TsaCodebaseAdmin" --instance-url "$TsaInstanceUrl" --project-name "$TsaProjectName" --area-path "$TsaBugAreaPath" --iteration-path "$TsaIterationPath" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel
}
if ($LASTEXITCODE -ne 0) {
Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian tsa-onboard failed with exit code $LASTEXITCODE."
ExitWithExitCode $LASTEXITCODE
@@ -80,11 +92,41 @@ try {
}
}
if ($ArtifactToolsList -and $ArtifactToolsList.Count -gt 0) {
& $(Join-Path $PSScriptRoot 'run-sdl.ps1') -GuardianCliLocation $guardianCliLocation -WorkingDirectory $workingDirectory -TargetDirectory $ArtifactsDirectory -GdnFolder $gdnFolder -ToolsList $ArtifactToolsList -AzureDevOpsAccessToken $AzureDevOpsAccessToken -UpdateBaseline $UpdateBaseline -GuardianLoggerLevel $GuardianLoggerLevel -CrScanAdditionalRunConfigParams $CrScanAdditionalRunConfigParams -PoliCheckAdditionalRunConfigParams $PoliCheckAdditionalRunConfigParams
# Configure a list of tools with a default target directory. Populates the ".gdn/r" directory.
function Configure-ToolsList([object[]] $tools, [string] $targetDirectory) {
if ($tools -and $tools.Count -gt 0) {
Exec-BlockVerbosely {
& $(Join-Path $PSScriptRoot 'configure-sdl-tool.ps1') `
-GuardianCliLocation $guardianCliLocation `
-WorkingDirectory $workingDirectory `
-TargetDirectory $targetDirectory `
-GdnFolder $gdnFolder `
-ToolsList $tools `
-AzureDevOpsAccessToken $AzureDevOpsAccessToken `
-GuardianLoggerLevel $GuardianLoggerLevel `
-CrScanAdditionalRunConfigParams $CrScanAdditionalRunConfigParams `
-PoliCheckAdditionalRunConfigParams $PoliCheckAdditionalRunConfigParams
if ($BreakOnFailure) {
Exit-IfNZEC "Sdl"
}
}
}
}
if ($SourceToolsList -and $SourceToolsList.Count -gt 0) {
& $(Join-Path $PSScriptRoot 'run-sdl.ps1') -GuardianCliLocation $guardianCliLocation -WorkingDirectory $workingDirectory -TargetDirectory $SourceDirectory -GdnFolder $gdnFolder -ToolsList $SourceToolsList -AzureDevOpsAccessToken $AzureDevOpsAccessToken -UpdateBaseline $UpdateBaseline -GuardianLoggerLevel $GuardianLoggerLevel -CrScanAdditionalRunConfigParams $CrScanAdditionalRunConfigParams -PoliCheckAdditionalRunConfigParams $PoliCheckAdditionalRunConfigParams
# Configure Artifact and Source tools with default Target directories.
Configure-ToolsList $ArtifactToolsList $ArtifactsDirectory
Configure-ToolsList $SourceToolsList $SourceDirectory
# Configure custom tools with no default Target directory.
Configure-ToolsList $CustomToolsList $null
# At this point, all tools are configured in the ".gdn" directory. Run them all in a single call.
# (If we used "run" multiple times, each run would overwrite data from earlier runs.)
Exec-BlockVerbosely {
& $(Join-Path $PSScriptRoot 'run-sdl.ps1') `
-GuardianCliLocation $guardianCliLocation `
-WorkingDirectory $workingDirectory `
-UpdateBaseline $UpdateBaseline `
-GdnFolder $gdnFolder
}
if ($TsaPublish) {
@@ -92,8 +134,9 @@ try {
if (-not $TsaRepositoryName) {
$TsaRepositoryName = "$($Repository)-$($BranchName)"
}
Write-Host "$guardianCliLocation tsa-publish --all-tools --repository-name `"$TsaRepositoryName`" --branch-name `"$TsaBranchName`" --build-number `"$BuildNumber`" --codebase-name `"$TsaCodebaseName`" --notification-alias `"$TsaNotificationEmail`" --codebase-admin `"$TsaCodebaseAdmin`" --instance-url `"$TsaInstanceUrl`" --project-name `"$TsaProjectName`" --area-path `"$TsaBugAreaPath`" --iteration-path `"$TsaIterationPath`" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel"
& $guardianCliLocation tsa-publish --all-tools --repository-name "$TsaRepositoryName" --branch-name "$TsaBranchName" --build-number "$BuildNumber" --onboard $True --codebase-name "$TsaCodebaseName" --notification-alias "$TsaNotificationEmail" --codebase-admin "$TsaCodebaseAdmin" --instance-url "$TsaInstanceUrl" --project-name "$TsaProjectName" --area-path "$TsaBugAreaPath" --iteration-path "$TsaIterationPath" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel
Exec-BlockVerbosely {
& $guardianCliLocation tsa-publish --all-tools --repository-name "$TsaRepositoryName" --branch-name "$TsaBranchName" --build-number "$BuildNumber" --onboard $True --codebase-name "$TsaCodebaseName" --notification-alias "$TsaNotificationEmail" --codebase-admin "$TsaCodebaseAdmin" --instance-url "$TsaInstanceUrl" --project-name "$TsaProjectName" --area-path "$TsaBugAreaPath" --iteration-path "$TsaIterationPath" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel
}
if ($LASTEXITCODE -ne 0) {
Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian tsa-publish failed with exit code $LASTEXITCODE."
ExitWithExitCode $LASTEXITCODE
@@ -106,7 +149,11 @@ try {
if ($BreakOnFailure) {
Write-Host "Failing the build in case of breaking results..."
& $guardianCliLocation break
Exec-BlockVerbosely {
& $guardianCliLocation break --working-directory $workingDirectory --logger-level $GuardianLoggerLevel
}
} else {
Write-Host "Letting the build pass even if there were breaking results..."
}
}
catch {

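A sketch of how a caller might supply the three tool lists (tool names are illustrative and the remaining required parameters are omitted); in Arcade pipelines these values are normally passed through the additionalParameters setting of execute-sdl.yml:
& .\execute-all-sdl-tools.ps1 `
    -SourceToolsList @('policheck', 'credscan') `
    -ArtifactToolsList @('binskim') `
    -CustomToolsList @(@{ Name = 'credscan'; Scenario = 'artifacts'; Args = @('TargetDirectory < C:\artifacts') })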
63
eng/common/sdl/extract-artifact-archives.ps1

@@ -0,0 +1,63 @@
# This script looks for each archive file in a directory and extracts it into the target directory.
# For example, the file "$InputPath/bin.tar.gz" extracts to "$ExtractPath/bin.tar.gz.extracted/**".
# Uses the "tar" utility added to Windows 10 / Windows 2019 that supports tar.gz and zip.
param(
# Full path to directory where archives are stored.
[Parameter(Mandatory=$true)][string] $InputPath,
# Full path to directory to extract archives into. May be the same as $InputPath.
[Parameter(Mandatory=$true)][string] $ExtractPath
)
$ErrorActionPreference = 'Stop'
Set-StrictMode -Version 2.0
$disableConfigureToolsetImport = $true
try {
# `tools.ps1` checks $ci to perform some actions. Since the SDL
# scripts don't necessarily execute in the same agent that run the
# build.ps1/sh script this variable isn't automatically set.
$ci = $true
. $PSScriptRoot\..\tools.ps1
Measure-Command {
$jobs = @()
# Find archive files for non-Windows and Windows builds.
$archiveFiles = @(
Get-ChildItem (Join-Path $InputPath "*.tar.gz")
Get-ChildItem (Join-Path $InputPath "*.zip")
)
foreach ($targzFile in $archiveFiles) {
$jobs += Start-Job -ScriptBlock {
$file = $using:targzFile
$fileName = [System.IO.Path]::GetFileName($file)
$extractDir = Join-Path $using:ExtractPath "$fileName.extracted"
New-Item $extractDir -ItemType Directory -Force | Out-Null
Write-Host "Extracting '$file' to '$extractDir'..."
# Pipe errors to stdout to prevent PowerShell detecting them and quitting the job early.
# This type of quit skips the catch, so we wouldn't be able to tell which file triggered the
# error. Save output so it can be stored in the exception string along with context.
$output = tar -xf $file -C $extractDir 2>&1
# Handle NZEC manually rather than using Exit-IfNZEC: we are in a background job, so we
# don't have access to the outer scope.
if ($LASTEXITCODE -ne 0) {
throw "Error extracting '$file': non-zero exit code ($LASTEXITCODE). Output: '$output'"
}
Write-Host "Extracted to $extractDir"
}
}
Receive-Job $jobs -Wait
}
}
catch {
Write-Host $_
Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
ExitWithExitCode 1
}
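A usage sketch with placeholder paths; each archive lands under '<ExtractPath>\<archive name>.extracted', and the two paths may be identical, as noted in the header comment:
& .\extract-artifact-archives.ps1 `
    -InputPath 'C:\a\artifacts' `
    -ExtractPath 'C:\a\artifacts'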

50
eng/common/sdl/run-sdl.ps1

@@ -1,13 +1,9 @@
Param(
[string] $GuardianCliLocation,
[string] $WorkingDirectory,
[string] $TargetDirectory,
[string] $GdnFolder,
[string[]] $ToolsList,
[string] $UpdateBaseline,
[string] $GuardianLoggerLevel='Standard',
[string[]] $CrScanAdditionalRunConfigParams,
[string[]] $PoliCheckAdditionalRunConfigParams
[string] $GuardianLoggerLevel='Standard'
)
$ErrorActionPreference = 'Stop'
@@ -23,7 +19,6 @@ try {
. $PSScriptRoot\..\tools.ps1
# We store config files in the r directory of .gdn
Write-Host $ToolsList
$gdnConfigPath = Join-Path $GdnFolder 'r'
$ValidPath = Test-Path $GuardianCliLocation
@@ -33,37 +28,18 @@ try {
ExitWithExitCode 1
}
$configParam = @('--config')
foreach ($tool in $ToolsList) {
$gdnConfigFile = Join-Path $gdnConfigPath "$tool-configure.gdnconfig"
Write-Host $tool
# We have to manually configure tools that run on source to look at the source directory only
if ($tool -eq 'credscan') {
Write-Host "$GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args `" TargetDirectory < $TargetDirectory `" `" OutputType < pre `" $(If ($CrScanAdditionalRunConfigParams) {$CrScanAdditionalRunConfigParams})"
& $GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args " TargetDirectory < $TargetDirectory " "OutputType < pre" $(If ($CrScanAdditionalRunConfigParams) {$CrScanAdditionalRunConfigParams})
if ($LASTEXITCODE -ne 0) {
Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian configure for $tool failed with exit code $LASTEXITCODE."
ExitWithExitCode $LASTEXITCODE
}
}
if ($tool -eq 'policheck') {
Write-Host "$GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args `" Target < $TargetDirectory `" $(If ($PoliCheckAdditionalRunConfigParams) {$PoliCheckAdditionalRunConfigParams})"
& $GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args " Target < $TargetDirectory " $(If ($PoliCheckAdditionalRunConfigParams) {$PoliCheckAdditionalRunConfigParams})
if ($LASTEXITCODE -ne 0) {
Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian configure for $tool failed with exit code $LASTEXITCODE."
ExitWithExitCode $LASTEXITCODE
}
}
$configParam+=$gdnConfigFile
}
Write-Host "$GuardianCliLocation run --working-directory $WorkingDirectory --baseline mainbaseline --update-baseline $UpdateBaseline --logger-level $GuardianLoggerLevel $configParam"
& $GuardianCliLocation run --working-directory $WorkingDirectory --tool $tool --baseline mainbaseline --update-baseline $UpdateBaseline --logger-level $GuardianLoggerLevel $configParam
if ($LASTEXITCODE -ne 0) {
Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian run for $ToolsList using $configParam failed with exit code $LASTEXITCODE."
ExitWithExitCode $LASTEXITCODE
$gdnConfigFiles = Get-ChildItem $gdnConfigPath -Recurse -Include '*.gdnconfig'
Write-Host "Discovered Guardian config files:"
$gdnConfigFiles | Out-String | Write-Host
Exec-BlockVerbosely {
& $GuardianCliLocation run `
--working-directory $WorkingDirectory `
--baseline mainbaseline `
--update-baseline $UpdateBaseline `
--logger-level $GuardianLoggerLevel `
--config @gdnConfigFiles
Exit-IfNZEC "Sdl"
}
}
catch {

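The '--config @gdnConfigFiles' call relies on PowerShell array splatting: each element of the array is handed to the Guardian executable as a separate argument. A minimal sketch of the mechanism, using a hypothetical native tool:
# Collect the full paths of the generated config files.
$configFiles = (Get-ChildItem '.gdn\r' -Filter '*.gdnconfig').FullName
# @configFiles expands to one argument per path after --config.
& some-tool.exe run --config @configFiles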
108
eng/common/templates/job/execute-sdl.yml

@@ -2,17 +2,41 @@ parameters:
enable: 'false' # Whether the SDL validation job should execute or not
overrideParameters: '' # Optional: to override values for parameters.
additionalParameters: '' # Optional: parameters that need user specific values eg: '-SourceToolsList @("abc","def") -ArtifactToolsList @("ghi","jkl")'
# Optional: if specified, restore and use this version of Guardian instead of the default.
overrideGuardianVersion: ''
# Optional: if true, publish the '.gdn' folder as a pipeline artifact. This can help with in-depth
# diagnosis of problems with specific tool configurations.
publishGuardianDirectoryToPipeline: false
# The script to run to execute all SDL tools. Use this if you want to use a script to define SDL
# parameters rather than relying on YAML. It may be better to use a local script, because you can
# reproduce results locally without piecing together a command based on the YAML.
executeAllSdlToolsScript: 'eng/common/sdl/execute-all-sdl-tools.ps1'
# There is some sort of bug (has been reported) in Azure DevOps where if this parameter is named
# 'continueOnError', the parameter value is not correctly picked up.
# This can also be remedied by the caller (post-build.yml) if it does not use a nested parameter
sdlContinueOnError: false # optional: determines whether to continue the build if the step errors;
downloadArtifacts: true # optional: determines if the artifacts should be dowloaded
# optional: determines if build artifacts should be downloaded.
downloadArtifacts: true
# optional: determines if this job should search the directory of downloaded artifacts for
# 'tar.gz' and 'zip' archive files and extract them before running SDL validation tasks.
extractArchiveArtifacts: false
dependsOn: '' # Optional: dependencies of the job
artifactNames: '' # Optional: patterns supplied to DownloadBuildArtifacts
# Usage:
# artifactNames:
# - 'BlobArtifacts'
# - 'Artifacts_Windows_NT_Release'
# Optional: download a list of pipeline artifacts. 'downloadArtifacts' controls build artifacts,
# not pipeline artifacts, so doesn't affect the use of this parameter.
pipelineArtifactNames: []
# Optional: location and ID of the AzDO build that the build/pipeline artifacts should be
# downloaded from. By default, uses runtime expressions to decide based on the variables set by
# the 'setupMaestroVars' dependency. Overriding this parameter is necessary if SDL tasks are
# running without Maestro++/BAR involved, or to download artifacts from a specific existing build
# to iterate quickly on SDL changes.
AzDOProjectName: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOProjectName'] ]
AzDOPipelineId: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOPipelineId'] ]
AzDOBuildId: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOBuildId'] ]
jobs:
- job: Run_SDL
@@ -22,16 +46,29 @@ jobs:
variables:
- group: DotNet-VSTS-Bot
- name: AzDOProjectName
value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOProjectName'] ]
value: ${{ parameters.AzDOProjectName }}
- name: AzDOPipelineId
value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOPipelineId'] ]
value: ${{ parameters.AzDOPipelineId }}
- name: AzDOBuildId
value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOBuildId'] ]
value: ${{ parameters.AzDOBuildId }}
# The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in
# sync with the packages.config file.
- name: DefaultGuardianVersion
value: 0.53.3
- name: GuardianVersion
value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }}
- name: GuardianPackagesConfigFile
value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config
pool:
name: Hosted VS2017
# To extract archives (.tar.gz, .zip), we need access to "tar", added in Windows 10/2019.
${{ if eq(parameters.extractArchiveArtifacts, 'false') }}:
name: Hosted VS2017
${{ if ne(parameters.extractArchiveArtifacts, 'false') }}:
vmImage: windows-2019
steps:
- checkout: self
clean: true
- ${{ if ne(parameters.downloadArtifacts, 'false')}}:
- ${{ if ne(parameters.artifactNames, '') }}:
- ${{ each artifactName in parameters.artifactNames }}:
@@ -59,16 +96,51 @@ jobs:
itemPattern: "**"
downloadPath: $(Build.ArtifactStagingDirectory)\artifacts
checkDownloadedFiles: true
- ${{ each artifactName in parameters.pipelineArtifactNames }}:
- task: DownloadPipelineArtifact@2
displayName: Download Pipeline Artifacts
inputs:
buildType: specific
buildVersionToDownload: specific
project: $(AzDOProjectName)
pipeline: $(AzDOPipelineId)
buildId: $(AzDOBuildId)
artifactName: ${{ artifactName }}
downloadPath: $(Build.ArtifactStagingDirectory)\artifacts
checkDownloadedFiles: true
- powershell: eng/common/sdl/extract-artifact-packages.ps1
-InputPath $(Build.ArtifactStagingDirectory)\artifacts\BlobArtifacts
-ExtractPath $(Build.ArtifactStagingDirectory)\artifacts\BlobArtifacts
displayName: Extract Blob Artifacts
continueOnError: ${{ parameters.sdlContinueOnError }}
- powershell: eng/common/sdl/extract-artifact-packages.ps1
-InputPath $(Build.ArtifactStagingDirectory)\artifacts\PackageArtifacts
-ExtractPath $(Build.ArtifactStagingDirectory)\artifacts\PackageArtifacts
displayName: Extract Package Artifacts
continueOnError: ${{ parameters.sdlContinueOnError }}
- ${{ if ne(parameters.extractArchiveArtifacts, 'false') }}:
- powershell: eng/common/sdl/extract-artifact-archives.ps1
-InputPath $(Build.ArtifactStagingDirectory)\artifacts
-ExtractPath $(Build.ArtifactStagingDirectory)\artifacts
displayName: Extract Archive Artifacts
continueOnError: ${{ parameters.sdlContinueOnError }}
- ${{ if ne(parameters.overrideGuardianVersion, '') }}:
- powershell: |
$content = Get-Content $(GuardianPackagesConfigFile)
Write-Host "packages.config content was:`n$content"
$content = $content.Replace('$(DefaultGuardianVersion)', '$(GuardianVersion)')
$content | Set-Content $(GuardianPackagesConfigFile)
Write-Host "packages.config content updated to:`n$content"
displayName: Use overridden Guardian version ${{ parameters.overrideGuardianVersion }}
- task: NuGetToolInstaller@1
displayName: 'Install NuGet.exe'
- task: NuGetCommand@2
@@ -79,15 +151,35 @@ jobs:
nugetConfigPath: $(Build.SourcesDirectory)\eng\common\sdl\NuGet.config
externalFeedCredentials: GuardianConnect
restoreDirectory: $(Build.SourcesDirectory)\.packages
- ${{ if ne(parameters.overrideParameters, '') }}:
- powershell: eng/common/sdl/execute-all-sdl-tools.ps1 ${{ parameters.overrideParameters }}
- powershell: ${{ parameters.executeAllSdlToolsScript }} ${{ parameters.overrideParameters }}
displayName: Execute SDL
continueOnError: ${{ parameters.sdlContinueOnError }}
- ${{ if eq(parameters.overrideParameters, '') }}:
- powershell: eng/common/sdl/execute-all-sdl-tools.ps1
-GuardianPackageName Microsoft.Guardian.Cli.0.53.3
- powershell: ${{ parameters.executeAllSdlToolsScript }}
-GuardianPackageName Microsoft.Guardian.Cli.$(GuardianVersion)
-NugetPackageDirectory $(Build.SourcesDirectory)\.packages
-AzureDevOpsAccessToken $(dn-bot-dotnet-build-rw-code-rw)
${{ parameters.additionalParameters }}
displayName: Execute SDL
continueOnError: ${{ parameters.sdlContinueOnError }}
- ${{ if ne(parameters.publishGuardianDirectoryToPipeline, 'false') }}:
# We want to publish the Guardian results and configuration for easy diagnosis. However, the
# '.gdn' dir is a mix of configuration, results, extracted dependencies, and Guardian default
# tooling files. Some of these files are large and aren't useful during an investigation, so
# exclude them by simply deleting them before publishing. (As of writing, there is no documented
# way to selectively exclude a dir from the pipeline artifact publish task.)
- task: DeleteFiles@1
displayName: Delete Guardian dependencies to avoid uploading
inputs:
SourceFolder: $(Agent.BuildDirectory)/.gdn
Contents: |
c
i
condition: succeededOrFailed()
- publish: $(Agent.BuildDirectory)/.gdn
artifact: GuardianConfiguration
displayName: Publish GuardianConfiguration
condition: succeededOrFailed()

2
eng/common/templates/job/job.yml

@@ -103,7 +103,7 @@ jobs:
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- ${{ if eq(parameters.enableMicrobuild, 'true') }}:
- task: MicroBuildSigningPlugin@2
- task: MicroBuildSigningPlugin@3
displayName: Install MicroBuild plugin
inputs:
signType: $(_SignType)

26
eng/common/templates/job/publish-build-assets.yml

@@ -94,7 +94,31 @@ jobs:
PathtoPublish: '$(Build.StagingDirectory)/ReleaseConfigs.txt'
PublishLocation: Container
ArtifactName: ReleaseConfigs
- task: powershell@2
displayName: Check if SymbolPublishingExclusionsFile.txt exists
inputs:
targetType: inline
script: |
$symbolExclusionfile = "$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt"
if(Test-Path -Path $symbolExclusionfile)
{
Write-Host "SymbolExclusionFile exists"
Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]true"
}
else{
Write-Host "Symbols Exclusion file does not exists"
Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]false"
}
- task: PublishBuildArtifacts@1
displayName: Publish SymbolPublishingExclusionsFile Artifact
condition: eq(variables['SymbolExclusionFile'], 'true')
inputs:
PathtoPublish: '$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt'
PublishLocation: Container
ArtifactName: ReleaseConfigs
- ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}:
- template: /eng/common/templates/steps/publish-logs.yml
parameters:

21
eng/common/templates/job/source-index-stage1.yml

@@ -34,29 +34,24 @@ jobs:
inputs:
packageType: sdk
version: 3.1.x
- task: UseDotNet@2
displayName: Use .NET Core sdk
inputs:
useGlobalJson: true
installationPath: $(Agent.TempDirectory)/dotnet
workingDirectory: $(Agent.TempDirectory)
- script: |
dotnet tool install BinLogToSln --version $(SourceIndexPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path .source-index/tools
dotnet tool install UploadIndexStage1 --version $(SourceIndexPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path .source-index/tools
echo ##vso[task.prependpath]$(Build.SourcesDirectory)/.source-index/tools
$(Agent.TempDirectory)/dotnet/dotnet tool install BinLogToSln --version $(SourceIndexPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools
$(Agent.TempDirectory)/dotnet/dotnet tool install UploadIndexStage1 --version $(SourceIndexPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools
displayName: Download Tools
# Set working directory to temp directory so 'dotnet' doesn't try to use global.json and use the repo's sdk.
workingDirectory: $(Agent.TempDirectory)
- script: ${{ parameters.sourceIndexBuildCommand }}
displayName: Build Repository
- script: BinLogToSln -i $(BinlogPath) -r $(Build.SourcesDirectory) -n $(Build.Repository.Name) -o .source-index/stage1output
- script: $(Agent.TempDirectory)/.source-index/tools/BinLogToSln -i $(BinlogPath) -r $(Build.SourcesDirectory) -n $(Build.Repository.Name) -o .source-index/stage1output
displayName: Process Binlog into indexable sln
env:
DOTNET_ROLL_FORWARD_ON_NO_CANDIDATE_FX: 2
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- script: UploadIndexStage1 -i .source-index/stage1output -n $(Build.Repository.Name)
- script: $(Agent.TempDirectory)/.source-index/tools/UploadIndexStage1 -i .source-index/stage1output -n $(Build.Repository.Name)
displayName: Upload stage1 artifacts to source index
env:
BLOB_CONTAINER_URL: $(source-dot-net-stage1-blob-container-url)
DOTNET_ROLL_FORWARD_ON_NO_CANDIDATE_FX: 2

31
eng/common/templates/steps/source-build.yml

@@ -18,6 +18,35 @@ steps:
set -x
df -h
# If building on the internal project, the artifact feeds variable may be available (usually only if needed)
# In that case, call the feed setup script to add internal feeds corresponding to public ones.
# In addition, add an msbuild argument to copy the WIP from the repo to the target build location.
# This is because SetupNuGetSources.sh will alter the current NuGet.config file, and we need to preserve those
# changes.
$internalRestoreArgs=
if [ '$(dn-bot-dnceng-artifact-feeds-rw)' != '$''(dn-bot-dnceng-artifact-feeds-rw)' ]; then
# Temporarily work around https://github.com/dotnet/arcade/issues/7709
chmod +x $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh
$(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh $(Build.SourcesDirectory)/NuGet.config $(dn-bot-dnceng-artifact-feeds-rw)
internalRestoreArgs='/p:CopyWipIntoInnerSourceBuildRepo=true'
# The 'Copy WIP' feature of source build uses git stash to apply changes from the original repo.
# This only works if there is a username/email configured, which won't be the case in most CI runs.
git config --get user.email
if [ $? -ne 0 ]; then
git config user.email dn-bot@microsoft.com
git config user.name dn-bot
fi
fi
# If building on the internal project, the internal storage variable may be available (usually only if needed)
# In that case, add variables to allow the download of internal runtimes if the specified versions are not found
# in the default public locations.
internalRuntimeDownloadArgs=
if [ '$(dotnetclimsrc-read-sas-token-base64)' != '$''(dotnetclimsrc-read-sas-token-base64)' ]; then
internalRuntimeDownloadArgs='/p:DotNetRuntimeSourceFeed=https://dotnetclimsrc.blob.core.windows.net/dotnet /p:DotNetRuntimeSourceFeedKey=$(dotnetclimsrc-read-sas-token-base64) --runtimesourcefeed https://dotnetclimsrc.blob.core.windows.net/dotnet --runtimesourcefeedkey $(dotnetclimsrc-read-sas-token-base64)'
fi
buildConfig=Release
# Check if AzDO substitutes in a build config from a variable, and use it if so.
if [ '$(_BuildConfig)' != '$''(_BuildConfig)' ]; then
@@ -43,6 +72,8 @@ steps:
--configuration $buildConfig \
--restore --build --pack $publishArgs -bl \
$officialBuildArgs \
$internalRuntimeDownloadArgs \
$internalRestoreArgs \
$targetRidArgs \
/p:SourceBuildNonPortable=${{ parameters.platform.nonPortable }} \
/p:ArcadeBuildFromSource=true

108
eng/common/tools.ps1

@@ -42,7 +42,7 @@
[bool]$useInstalledDotNetCli = if (Test-Path variable:useInstalledDotNetCli) { $useInstalledDotNetCli } else { $true }
# Enable repos to use a particular version of the on-line dotnet-install scripts.
# default URL: https://dot.net/v1/dotnet-install.ps1
# default URL: https://dotnet.microsoft.com/download/dotnet/scripts/v1/dotnet-install.ps1
[string]$dotnetInstallScriptVersion = if (Test-Path variable:dotnetInstallScriptVersion) { $dotnetInstallScriptVersion } else { 'v1' }
# True to use global NuGet cache instead of restoring packages to repository-local directory.
@@ -106,6 +106,46 @@ function Exec-Process([string]$command, [string]$commandArgs) {
}
}
# Take the given block, print it, print what the block probably references from the current set of
# variables using low-effort string matching, then run the block.
#
# This is intended to replace the pattern of manually copy-pasting a command, wrapping it in quotes,
# and printing it using "Write-Host". The copy-paste method is more readable in build logs, but less
# maintainable and less reliable. It is easy to make a mistake and modify the command without
# properly updating the "Write-Host" line, resulting in misleading build logs. The probability of
# this mistake makes the pattern hard to trust when it shows up in build logs. Finding the bug in
# existing source code can also be difficult, because the strings are not aligned to each other and
# the line may be 300+ columns long.
#
# By removing the need to maintain two copies of the command, Exec-BlockVerbosely avoids the issues.
#
# In Bash (or any posix-like shell), "set -x" prints usable verbose output automatically.
# "Set-PSDebug" appears to be similar at first glance, but unfortunately, it isn't very useful: it
# doesn't print any info about the variables being used by the command, which is normally the
# interesting part to diagnose.
function Exec-BlockVerbosely([scriptblock] $block) {
Write-Host "--- Running script block:"
$blockString = $block.ToString().Trim()
Write-Host $blockString
Write-Host "--- List of variables that might be used:"
# For each variable x in the environment, check the block for a reference to x via simple "$x" or
# "@x" syntax. This doesn't detect other ways to reference variables ("${x}" nor "$variable:x",
# among others). It only catches what this function was originally written for: simple
# command-line commands.
$variableTable = Get-Variable |
Where-Object {
$blockString.Contains("`$$($_.Name)") -or $blockString.Contains("@$($_.Name)")
} |
Format-Table -AutoSize -HideTableHeaders -Wrap |
Out-String
Write-Host $variableTable.Trim()
Write-Host "--- Executing:"
& $block
Write-Host "--- Done running script block!"
}
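# Usage sketch (illustrative only; $nugetExe, $packagePath and $feedUrl are placeholders):
# the block, the variables it references, and its output all end up in the build log.
Exec-BlockVerbosely {
    & $nugetExe push $packagePath -Source $feedUrl
    Exit-IfNZEC 'Sdl'
}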
# createSdkLocationFile parameter enables a file being generated under the toolset directory
# which writes the sdk's location into. This is only necessary for cmd --> powershell invocations
# as dot sourcing isn't possible.
@@ -123,6 +163,9 @@ function InitializeDotNetCli([bool]$install, [bool]$createSdkLocationFile) {
# Disable telemetry on CI.
if ($ci) {
$env:DOTNET_CLI_TELEMETRY_OPTOUT=1
# In case of network error, try to log the current IP for reference
Try-LogClientIpAddress
}
# Source Build uses DotNetCoreSdkDir variable
@@ -223,7 +266,7 @@ function GetDotNetInstallScript([string] $dotnetRoot) {
if (!(Test-Path $installScript)) {
Create-Directory $dotnetRoot
$ProgressPreference = 'SilentlyContinue' # Don't display the console progress UI - it's a huge perf hit
$uri = "https://dot.net/$dotnetInstallScriptVersion/dotnet-install.ps1"
$uri = "https://dotnet.microsoft.com/download/dotnet/scripts/$dotnetInstallScriptVersion/dotnet-install.ps1"
Retry({
Write-Host "GET $uri"
@@ -378,7 +421,16 @@ function InitializeVisualStudioMSBuild([bool]$install, [object]$vsRequirements =
}
$msbuildVersionDir = if ([int]$vsMajorVersion -lt 16) { "$vsMajorVersion.0" } else { "Current" }
return $global:_MSBuildExe = Join-Path $vsInstallDir "MSBuild\$msbuildVersionDir\Bin\msbuild.exe"
$local:BinFolder = Join-Path $vsInstallDir "MSBuild\$msbuildVersionDir\Bin"
$local:Prefer64bit = if (Get-Member -InputObject $vsRequirements -Name 'Prefer64bit') { $vsRequirements.Prefer64bit } else { $false }
if ($local:Prefer64bit -and (Test-Path(Join-Path $local:BinFolder "amd64"))) {
$global:_MSBuildExe = Join-Path $local:BinFolder "amd64\msbuild.exe"
} else {
$global:_MSBuildExe = Join-Path $local:BinFolder "msbuild.exe"
}
return $global:_MSBuildExe
}
function InitializeVisualStudioEnvironmentVariables([string] $vsInstallDir, [string] $vsMajorVersion) {
@@ -623,6 +675,17 @@ function ExitWithExitCode([int] $exitCode) {
exit $exitCode
}
# Check if $LASTEXITCODE is a nonzero exit code (NZEC). If so, print a Azure Pipeline error for
# diagnostics, then exit the script with the $LASTEXITCODE.
function Exit-IfNZEC([string] $category = "General") {
Write-Host "Exit code $LASTEXITCODE"
if ($LASTEXITCODE -ne 0) {
$message = "Last command failed with exit code $LASTEXITCODE."
Write-PipelineTelemetryError -Force -Category $category -Message $message
ExitWithExitCode $LASTEXITCODE
}
}
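# Usage sketch: invoke right after a native command so a non-zero exit code is
# reported as a pipeline telemetry error and the script exits with that code.
& nuget restore MySolution.sln   # hypothetical native call
Exit-IfNZEC 'General'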
function Stop-Processes() {
Write-Host 'Killing running build processes...'
foreach ($processName in $processesToStopOnExit) {
@@ -646,6 +709,8 @@ function MSBuild() {
Write-PipelineSetVariable -Name 'NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS' -Value '20'
}
Enable-Nuget-EnhancedRetry
$toolsetBuildProject = InitializeToolset
$basePath = Split-Path -parent $toolsetBuildProject
$possiblePaths = @(
@@ -654,6 +719,8 @@ function MSBuild() {
(Join-Path $basePath (Join-Path $buildTool.Framework 'Microsoft.DotNet.Arcade.Sdk.dll')),
(Join-Path $basePath (Join-Path netcoreapp2.1 'Microsoft.DotNet.ArcadeLogging.dll')),
(Join-Path $basePath (Join-Path netcoreapp2.1 'Microsoft.DotNet.Arcade.Sdk.dll'))
(Join-Path $basePath (Join-Path netcoreapp3.1 'Microsoft.DotNet.ArcadeLogging.dll')),
(Join-Path $basePath (Join-Path netcoreapp3.1 'Microsoft.DotNet.Arcade.Sdk.dll'))
)
$selectedPath = $null
foreach ($path in $possiblePaths) {
@@ -690,6 +757,8 @@ function MSBuild-Core() {
}
}
Enable-Nuget-EnhancedRetry
$buildTool = InitializeBuildTool
$cmdArgs = "$($buildTool.Command) /m /nologo /clp:Summary /v:$verbosity /nr:$nodeReuse /p:ContinuousIntegrationBuild=$ci"
@@ -812,3 +881,36 @@ if (!$disableConfigureToolsetImport) {
}
}
}
function Try-LogClientIpAddress()
{
Write-Host "Attempting to log this client's IP for Azure Package feed telemetry purposes"
try
{
$result = Invoke-WebRequest -Uri "http://co1.msedge.net/fdv2/diagnostics.aspx" -UseBasicParsing
$lines = $result.Content.Split([Environment]::NewLine)
$socketIp = $lines | Select-String -Pattern "^Socket IP:.*"
Write-Host $socketIp
$clientIp = $lines | Select-String -Pattern "^Client IP:.*"
Write-Host $clientIp
}
catch
{
Write-Host "Unable to get this machine's effective IP address for logging: $_"
}
}
#
# If $ci flag is set, turn on (and log that we did) special environment variables for improved Nuget client retry logic.
#
function Enable-Nuget-EnhancedRetry() {
if ($ci) {
Write-Host "Setting NUGET enhanced retry environment variables"
$env:NUGET_ENABLE_EXPERIMENTAL_HTTP_RETRY = 'true'
$env:NUGET_EXPERIMENTAL_MAX_NETWORK_TRY_COUNT = 6
$env:NUGET_EXPERIMENTAL_NETWORK_RETRY_DELAY_MILLISECONDS = 1000
Write-PipelineSetVariable -Name 'NUGET_ENABLE_EXPERIMENTAL_HTTP_RETRY' -Value 'true'
Write-PipelineSetVariable -Name 'NUGET_EXPERIMENTAL_MAX_NETWORK_TRY_COUNT' -Value '6'
Write-PipelineSetVariable -Name 'NUGET_EXPERIMENTAL_NETWORK_RETRY_DELAY_MILLISECONDS' -Value '1000'
}
}

20
eng/common/tools.sh

@@ -54,7 +54,7 @@ warn_as_error=${warn_as_error:-true}
use_installed_dotnet_cli=${use_installed_dotnet_cli:-true}
# Enable repos to use a particular version of the on-line dotnet-install scripts.
# default URL: https://dot.net/v1/dotnet-install.sh
# default URL: https://dotnet.microsoft.com/download/dotnet/scripts/v1/dotnet-install.sh
dotnetInstallScriptVersion=${dotnetInstallScriptVersion:-'v1'}
# True to use global NuGet cache instead of restoring packages to repository-local directory.
@@ -262,7 +262,7 @@ function with_retries {
function GetDotNetInstallScript {
local root=$1
local install_script="$root/dotnet-install.sh"
local install_script_url="https://dot.net/$dotnetInstallScriptVersion/dotnet-install.sh"
local install_script_url="https://dotnet.microsoft.com/download/dotnet/scripts/$dotnetInstallScriptVersion/dotnet-install.sh"
if [[ ! -a "$install_script" ]]; then
mkdir -p "$root"
@@ -399,6 +399,13 @@ function StopProcesses {
return 0
}
function TryLogClientIpAddress () {
echo 'Attempting to log this client''s IP for Azure Package feed telemetry purposes'
if command -v curl > /dev/null; then
curl -s 'http://co1.msedge.net/fdv2/diagnostics.aspx' | grep ' IP: ' || true
fi
}
function MSBuild {
local args=$@
if [[ "$pipelines_log" == true ]]; then
@@ -410,6 +417,13 @@ function MSBuild {
export NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS=20
Write-PipelineSetVariable -name "NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS" -value "20"
Write-PipelineSetVariable -name "NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS" -value "20"
export NUGET_ENABLE_EXPERIMENTAL_HTTP_RETRY=true
export NUGET_EXPERIMENTAL_MAX_NETWORK_TRY_COUNT=6
export NUGET_EXPERIMENTAL_NETWORK_RETRY_DELAY_MILLISECONDS=1000
Write-PipelineSetVariable -name "NUGET_ENABLE_EXPERIMENTAL_HTTP_RETRY" -value "true"
Write-PipelineSetVariable -name "NUGET_EXPERIMENTAL_MAX_NETWORK_TRY_COUNT" -value "6"
Write-PipelineSetVariable -name "NUGET_EXPERIMENTAL_NETWORK_RETRY_DELAY_MILLISECONDS" -value "1000"
fi
local toolset_dir="${_InitializeToolset%/*}"
@@ -420,6 +434,8 @@ function MSBuild {
possiblePaths+=( "$toolset_dir/$_InitializeBuildToolFramework/Microsoft.DotNet.Arcade.Sdk.dll" )
possiblePaths+=( "$toolset_dir/netcoreapp2.1/Microsoft.DotNet.ArcadeLogging.dll" )
possiblePaths+=( "$toolset_dir/netcoreapp2.1/Microsoft.DotNet.Arcade.Sdk.dll" )
possiblePaths+=( "$toolset_dir/netcoreapp3.1/Microsoft.DotNet.ArcadeLogging.dll" )
possiblePaths+=( "$toolset_dir/netcoreapp3.1/Microsoft.DotNet.Arcade.Sdk.dll" )
for path in "${possiblePaths[@]}"; do
if [[ -f $path ]]; then
selectedPath=$path

8
global.json

@@ -1,4 +1,8 @@
{
"sdk": {
"version": "6.0.100"
},
"tools": {
"dotnet": "6.0.100",
@@ -12,7 +16,7 @@
},
"msbuild-sdks": {
"Microsoft.DotNet.Arcade.Sdk": "6.0.0-beta.21316.1",
"Microsoft.DotNet.Helix.Sdk": "6.0.0-beta.21316.1"
"Microsoft.DotNet.Arcade.Sdk": "6.0.0-beta.21519.3",
"Microsoft.DotNet.Helix.Sdk": "6.0.0-beta.21519.3"
}
}
