86 changed files with 2144 additions and 692 deletions
@ -1,2 +1,2 @@ |
|||
@echo off |
|||
powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0Build.ps1""" -restore -build -test -sign -pack -publish -ci %*" |
|||
powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0Build.ps1""" -restore -build -test -sign -pack -publish -ci %*" |
|||
|
|||
@ -0,0 +1,28 @@ |
|||
parameters: |
|||
# Enable cleanup tasks for MicroBuild |
|||
enableMicrobuild: false |
|||
# Enable cleanup tasks for MicroBuild on Mac and Linux |
|||
# Will be ignored if 'enableMicrobuild' is false or 'Agent.Os' is 'Windows_NT' |
|||
enableMicrobuildForMacAndLinux: false |
|||
continueOnError: false |
|||
|
|||
steps: |
|||
- ${{ if eq(parameters.enableMicrobuild, 'true') }}: |
|||
- task: MicroBuildCleanup@1 |
|||
displayName: Execute Microbuild cleanup tasks |
|||
condition: and( |
|||
always(), |
|||
or( |
|||
and( |
|||
eq(variables['Agent.Os'], 'Windows_NT'), |
|||
in(variables['_SignType'], 'real', 'test') |
|||
), |
|||
and( |
|||
${{ eq(parameters.enableMicrobuildForMacAndLinux, true) }}, |
|||
ne(variables['Agent.Os'], 'Windows_NT'), |
|||
eq(variables['_SignType'], 'real') |
|||
) |
|||
)) |
|||
continueOnError: ${{ parameters.continueOnError }} |
|||
env: |
|||
TeamName: $(_TeamName) |
|||
@ -0,0 +1,90 @@ |
|||
parameters: |
|||
# Enable install tasks for MicroBuild |
|||
enableMicrobuild: false |
|||
# Enable install tasks for MicroBuild on Mac and Linux |
|||
# Will be ignored if 'enableMicrobuild' is false or 'Agent.Os' is 'Windows_NT' |
|||
enableMicrobuildForMacAndLinux: false |
|||
# Determines whether the ESRP service connection information should be passed to the signing plugin. |
|||
# This overlaps with _SignType to some degree. We only need the service connection for real signing. |
|||
# It's important that the service connection not be passed to the MicroBuildSigningPlugin task in this place. |
|||
# Doing so will cause the service connection to be authorized for the pipeline, which isn't allowed and won't work for non-prod. |
|||
# Unfortunately, _SignType can't be used to exclude the use of the service connection in non-real sign scenarios. The |
|||
# variable is not available in template expression. _SignType has a very large proliferation across .NET, so replacing it is tough. |
|||
microbuildUseESRP: true |
|||
|
|||
continueOnError: false |
|||
|
|||
steps: |
|||
- ${{ if eq(parameters.enableMicrobuild, 'true') }}: |
|||
- ${{ if eq(parameters.enableMicrobuildForMacAndLinux, 'true') }}: |
|||
# Installing .NET 8 is required to use the MicroBuild signing plugin on non-Windows platforms |
|||
- task: UseDotNet@2 |
|||
displayName: Install .NET 8.0 SDK for MicroBuild Plugin |
|||
inputs: |
|||
packageType: sdk |
|||
version: 8.0.x |
|||
# Installing the SDK in a '.dotnet-microbuild' directory is required for signing. |
|||
# See target FindDotNetPathForMicroBuild in arcade/src/Microsoft.DotNet.Arcade.Sdk/tools/Sign.proj |
|||
# Do not remove '.dotnet-microbuild' from the path without changing the corresponding logic. |
|||
installationPath: $(Agent.TempDirectory)/.dotnet-microbuild |
|||
condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT')) |
|||
|
|||
- script: | |
|||
REM Check if ESRP is disabled while SignType is real |
|||
if /I "${{ parameters.microbuildUseESRP }}"=="false" if /I "$(_SignType)"=="real" ( |
|||
echo Error: ESRP must be enabled when SignType is real. |
|||
exit /b 1 |
|||
) |
|||
displayName: 'Validate ESRP usage (Windows)' |
|||
condition: and(succeeded(), eq(variables['Agent.Os'], 'Windows_NT')) |
|||
- script: | |
|||
# Check if ESRP is disabled while SignType is real |
|||
if [ "${{ parameters.microbuildUseESRP }}" = "false" ] && [ "$(_SignType)" = "real" ]; then |
|||
echo "Error: ESRP must be enabled when SignType is real." |
|||
exit 1 |
|||
fi |
|||
displayName: 'Validate ESRP usage (Non-Windows)' |
|||
condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT')) |
|||
|
|||
# Two different MB install steps. This is due to not being able to use the agent OS during |
|||
# YAML expansion, and Windows vs. Linux/Mac uses different service connections. However, |
|||
# we can avoid including the MB install step if not enabled at all. This avoids a bunch of |
|||
# extra pipeline authorizations, since most pipelines do not sign on non-Windows. |
|||
- task: MicroBuildSigningPlugin@4 |
|||
displayName: Install MicroBuild plugin (Windows) |
|||
inputs: |
|||
signType: $(_SignType) |
|||
zipSources: false |
|||
feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json |
|||
${{ if eq(parameters.microbuildUseESRP, true) }}: |
|||
ConnectedServiceName: 'MicroBuild Signing Task (DevDiv)' |
|||
${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: |
|||
ConnectedPMEServiceName: 6cc74545-d7b9-4050-9dfa-ebefcc8961ea |
|||
${{ else }}: |
|||
ConnectedPMEServiceName: 248d384a-b39b-46e3-8ad5-c2c210d5e7ca |
|||
env: |
|||
TeamName: $(_TeamName) |
|||
MicroBuildOutputFolderOverride: $(Agent.TempDirectory)/MicroBuild |
|||
SYSTEM_ACCESSTOKEN: $(System.AccessToken) |
|||
continueOnError: ${{ parameters.continueOnError }} |
|||
condition: and(succeeded(), eq(variables['Agent.Os'], 'Windows_NT'), in(variables['_SignType'], 'real', 'test')) |
|||
|
|||
- ${{ if eq(parameters.enableMicrobuildForMacAndLinux, true) }}: |
|||
- task: MicroBuildSigningPlugin@4 |
|||
displayName: Install MicroBuild plugin (non-Windows) |
|||
inputs: |
|||
signType: $(_SignType) |
|||
zipSources: false |
|||
feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json |
|||
${{ if eq(parameters.microbuildUseESRP, true) }}: |
|||
ConnectedServiceName: 'MicroBuild Signing Task (DevDiv)' |
|||
${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: |
|||
ConnectedPMEServiceName: beb8cb23-b303-4c95-ab26-9e44bc958d39 |
|||
${{ else }}: |
|||
ConnectedPMEServiceName: c24de2a5-cc7a-493d-95e4-8e5ff5cad2bc |
|||
env: |
|||
TeamName: $(_TeamName) |
|||
MicroBuildOutputFolderOverride: $(Agent.TempDirectory)/MicroBuild |
|||
SYSTEM_ACCESSTOKEN: $(System.AccessToken) |
|||
continueOnError: ${{ parameters.continueOnError }} |
|||
condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT'), eq(variables['_SignType'], 'real')) |
|||
@ -0,0 +1,35 @@ |
|||
parameters: |
|||
sourceIndexUploadPackageVersion: 2.0.0-20250818.1 |
|||
sourceIndexProcessBinlogPackageVersion: 1.0.1-20250818.1 |
|||
sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json |
|||
binlogPath: artifacts/log/Debug/Build.binlog |
|||
|
|||
steps: |
|||
- task: UseDotNet@2 |
|||
displayName: "Source Index: Use .NET 9 SDK" |
|||
inputs: |
|||
packageType: sdk |
|||
version: 9.0.x |
|||
installationPath: $(Agent.TempDirectory)/dotnet |
|||
workingDirectory: $(Agent.TempDirectory) |
|||
|
|||
- script: | |
|||
$(Agent.TempDirectory)/dotnet/dotnet tool install BinLogToSln --version ${{parameters.sourceIndexProcessBinlogPackageVersion}} --add-source ${{parameters.SourceIndexPackageSource}} --tool-path $(Agent.TempDirectory)/.source-index/tools |
|||
$(Agent.TempDirectory)/dotnet/dotnet tool install UploadIndexStage1 --version ${{parameters.sourceIndexUploadPackageVersion}} --add-source ${{parameters.SourceIndexPackageSource}} --tool-path $(Agent.TempDirectory)/.source-index/tools |
|||
displayName: "Source Index: Download netsourceindex Tools" |
|||
# Set working directory to temp directory so 'dotnet' doesn't try to use global.json and use the repo's sdk. |
|||
workingDirectory: $(Agent.TempDirectory) |
|||
|
|||
- script: $(Agent.TempDirectory)/.source-index/tools/BinLogToSln -i ${{parameters.BinlogPath}} -r $(System.DefaultWorkingDirectory) -n $(Build.Repository.Name) -o .source-index/stage1output |
|||
displayName: "Source Index: Process Binlog into indexable sln" |
|||
|
|||
- ${{ if and(ne(parameters.runAsPublic, 'true'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: |
|||
- task: AzureCLI@2 |
|||
displayName: "Source Index: Upload Source Index stage1 artifacts to Azure" |
|||
inputs: |
|||
azureSubscription: 'SourceDotNet Stage1 Publish' |
|||
addSpnToEnvironment: true |
|||
scriptType: 'ps' |
|||
scriptLocation: 'inlineScript' |
|||
inlineScript: | |
|||
$(Agent.TempDirectory)/.source-index/tools/UploadIndexStage1 -i .source-index/stage1output -n $(Build.Repository.Name) -s netsourceindexstage1 -b stage1 |
|||
@ -0,0 +1,334 @@ |
|||
#!/usr/bin/env python3 |
|||
|
|||
import argparse |
|||
import asyncio |
|||
import aiohttp |
|||
import gzip |
|||
import os |
|||
import re |
|||
import shutil |
|||
import subprocess |
|||
import sys |
|||
import tarfile |
|||
import tempfile |
|||
import zstandard |
|||
|
|||
from collections import deque |
|||
from functools import cmp_to_key |
|||
|
|||
async def download_file(session, url, dest_path, max_retries=3, retry_delay=2, timeout=60):
    """Asynchronous file download with retries.

    Args:
        session: aiohttp.ClientSession used for the request.
        url: Source URL to fetch.
        dest_path: Local file path the payload is written to.
        max_retries: Maximum number of attempts before giving up.
        retry_delay: Seconds to sleep between attempts.
        timeout: Per-request total timeout in seconds.
    """
    attempt = 0
    while attempt < max_retries:
        try:
            async with session.get(url, timeout=aiohttp.ClientTimeout(total=timeout)) as response:
                if response.status == 200:
                    with open(dest_path, "wb") as f:
                        content = await response.read()
                        f.write(content)
                    print(f"Downloaded {url} at {dest_path}")
                    return
                else:
                    # A definite HTTP error (404, 403, ...) will not improve
                    # on retry, so report it and stop immediately.
                    print(f"Failed to download {url}, Status Code: {response.status}")
                    break
        except asyncio.CancelledError:
            # BUGFIX: never swallow cancellation — retrying a cancelled task
            # defeats cooperative shutdown of the surrounding gather().
            raise
        except (asyncio.TimeoutError, aiohttp.ClientError) as e:
            print(f"Error downloading {url}: {type(e).__name__} - {e}. Retrying...")

        attempt += 1
        await asyncio.sleep(retry_delay)

    print(f"Failed to download {url} after {max_retries} attempts.")
|||
|
|||
async def download_deb_files_parallel(mirror, packages, tmp_dir):
    """Download the .deb archives for the given packages in parallel.

    Args:
        mirror: Base URL of the package mirror.
        packages: Mapping of package name -> index fields; the "Filename"
            field is the mirror-relative path of the .deb archive.
        tmp_dir: Directory downloads are written into (created if missing).
    """
    os.makedirs(tmp_dir, exist_ok=True)

    tasks = []
    timeout = aiohttp.ClientTimeout(total=60)
    async with aiohttp.ClientSession(timeout=timeout) as session:
        for pkg, info in packages.items():
            filename = info.get("Filename")
            if filename:
                # BUGFIX: the download URL is the mirror root joined with the
                # index's "Filename" field (which is mirror-relative, e.g.
                # pool/main/f/foo/foo_1.0_amd64.deb); the previous text had a
                # corrupted placeholder here.
                url = f"{mirror}/{filename}"
                dest_path = os.path.join(tmp_dir, os.path.basename(filename))
                tasks.append(asyncio.create_task(download_file(session, url, dest_path)))

        await asyncio.gather(*tasks)
|||
|
|||
async def download_package_index_parallel(mirror, arch, suites):
    """Download package index files for specified suites and components entirely in memory.

    Fetches Packages.gz for the 'main' and 'universe' components of every
    suite and returns the decompressed indices joined into one string, with
    blank lines between them so package stanzas stay separated.
    """
    request_timeout = aiohttp.ClientTimeout(total=60)
    fetches = []

    async with aiohttp.ClientSession(timeout=request_timeout) as session:
        for suite in suites:
            for component in ["main", "universe"]:
                index_url = f"{mirror}/dists/{suite}/{component}/binary-{arch}/Packages.gz"
                fetches.append(fetch_and_decompress(session, index_url))

        # return_exceptions keeps one failed component from sinking the rest.
        results = await asyncio.gather(*fetches, return_exceptions=True)

    # Only successful fetches yield strings; drop None results and exceptions.
    merged_content = "\n\n".join(r for r in results if isinstance(r, str))
    return merged_content
|||
|
|||
async def fetch_and_decompress(session, url):
    """Fetch and decompress a Packages.gz file.

    Returns the decompressed text on success, or None when the index does
    not exist or the fetch fails (errors are printed, never raised).
    """
    try:
        async with session.get(url) as response:
            if response.status != 200:
                print(f"Skipped index: {url} (doesn't exist)")
                return None
            payload = await response.read()
            text = gzip.decompress(payload).decode('utf-8')
            print(f"Downloaded index: {url}")
            return text
    except Exception as e:
        # Best-effort: a missing/unreachable index is tolerated by the caller.
        print(f"Error fetching {url}: {e}")
|||
|
|||
def parse_debian_version(version):
    """Parse a Debian package version into epoch, upstream version, and revision.

    The accepted format is ``[epoch:]upstream[-revision]``; a missing epoch
    maps to 0 and a missing revision to the empty string.

    Raises:
        ValueError: if the string does not match the expected format.
    """
    parsed = re.match(r'^(?:(\d+):)?([^-]+)(?:-(.+))?$', version)
    if parsed is None:
        raise ValueError(f"Invalid Debian version format: {version}")

    epoch, upstream, revision = parsed.groups()
    epoch_value = int(epoch) if epoch else 0
    return epoch_value, upstream, revision or ""
|||
|
|||
def compare_upstream_version(v1, v2):
    """Compare upstream or revision parts using (simplified) Debian rules.

    Each version is tokenized into alternating numeric and alphabetic runs;
    tokens are compared pairwise — numerically for digit runs, lexically
    otherwise — with strings ordering before numbers on a type mismatch.
    Returns a negative, zero, or positive integer.
    """
    def _tokens(text):
        pieces = re.split(r'([0-9]+|[A-Za-z]+)', text)
        return [int(piece) if piece.isdigit() else piece for piece in pieces if piece]

    left, right = _tokens(v1), _tokens(v2)

    for a, b in zip(left, right):
        if type(a) != type(b):
            # Mixed token types: the string-typed token sorts first.
            return -1 if isinstance(a, str) else 1
        if a != b:
            return (a > b) - (a < b)

    # All shared tokens equal: the version with more tokens sorts later.
    return len(left) - len(right)
|||
|
|||
def compare_debian_versions(version1, version2):
    """Compare two Debian package versions.

    Epoch dominates, then the upstream version, then the packaging revision.
    Returns a negative, zero, or positive integer.
    """
    epoch1, upstream1, revision1 = parse_debian_version(version1)
    epoch2, upstream2, revision2 = parse_debian_version(version2)

    if epoch1 != epoch2:
        return epoch1 - epoch2

    upstream_cmp = compare_upstream_version(upstream1, upstream2)
    if upstream_cmp:
        return upstream_cmp

    return compare_upstream_version(revision1, revision2)
|||
|
|||
def resolve_dependencies(packages, aliases, desired_packages):
    """Recursively resolves dependencies for the desired packages.

    Breadth-first walk over the "Depends" fields, starting from
    desired_packages. Virtual names are mapped to their first real provider
    via `aliases`. Exits the process when a requested name cannot be found.

    Returns:
        Real package names in discovery order (requested packages first).
    """
    resolved = []
    pending = deque(desired_packages)

    while pending:
        name = pending.popleft()
        # Fall back to the first provider when the name is only virtual.
        real_name = name if name in packages else aliases.get(name, [None])[0]

        if not real_name:
            print(f"Error: Package '{name}' was not found in the available packages.")
            sys.exit(1)

        if real_name not in resolved:
            resolved.append(real_name)

        depends_field = packages.get(real_name, {}).get("Depends", "")
        if depends_field:
            # Keep only the package name of each "name (version)" clause.
            dep_names = [clause.split(' ')[0] for clause in depends_field.split(', ') if clause]
            for dep in dep_names:
                if dep not in resolved and dep not in pending and dep in packages:
                    pending.append(dep)

    return resolved
|||
|
|||
def parse_package_index(content):
    """Parses the Packages index text and returns package information.

    Returns:
        (packages, aliases): `packages` maps each package name to its
        highest-version entry {"Version", "Filename", "Depends"}; `aliases`
        maps each "Provides" name to the list of real providing packages.
    """
    packages = {}
    aliases = {}

    # Package stanzas are separated by one or more blank lines.
    for entry in re.split(r'\n\n+', content):
        fields = dict(re.findall(r'^(\S+): (.+)$', entry, re.MULTILINE))
        if "Package" not in fields:
            continue

        name = fields["Package"]
        version = fields.get("Version")

        # Only keep the highest version seen for each package name.
        is_newer = (
            name not in packages
            or compare_debian_versions(version, packages[name]["Version"]) > 0
        )
        if is_newer:
            packages[name] = {
                "Version": version,
                "Filename": fields.get("Filename"),
                "Depends": fields.get("Depends"),
            }

        # Record the virtual names this package provides.
        provides = fields.get("Provides", None)
        if provides:
            for raw_alias in (x.strip() for x in provides.split(",")):
                # Strip "(= version)" qualifiers from the provided name.
                alias_name = re.sub(r'\s*\(=.*\)', '', raw_alias)
                providers = aliases.setdefault(alias_name, [])
                if name not in providers:
                    providers.append(name)

    return packages, aliases
|||
|
|||
def install_packages(mirror, packages_info, aliases, tmp_dir, extract_dir, ar_tool, desired_packages):
    """Downloads .deb files and extracts them.

    Resolves the dependency closure of `desired_packages`, downloads every
    resulting archive into `tmp_dir`, then unpacks them into `extract_dir`
    (the rootfs) using `ar_tool`.
    """
    resolved_packages = resolve_dependencies(packages_info, aliases, desired_packages)
    print(f"Resolved packages (including dependencies): {resolved_packages}")

    packages_to_download = {}

    for pkg in resolved_packages:
        if pkg in packages_info:
            packages_to_download[pkg] = packages_info[pkg]

        # Also fetch every real provider of a name that appears as a virtual
        # package, so all implementations are available for extraction.
        if pkg in aliases:
            for alias in aliases[pkg]:
                if alias in packages_info:
                    packages_to_download[alias] = packages_info[alias]

    asyncio.run(download_deb_files_parallel(mirror, packages_to_download, tmp_dir))

    # Map each resolved package to the local path of its downloaded archive
    # (download_deb_files_parallel names files after the index "Filename").
    package_to_deb_file_map = {}
    for pkg in resolved_packages:
        pkg_info = packages_info.get(pkg)
        if pkg_info:
            deb_filename = pkg_info.get("Filename")
            if deb_filename:
                deb_file_path = os.path.join(tmp_dir, os.path.basename(deb_filename))
                package_to_deb_file_map[pkg] = deb_file_path

    # Extract in reverse resolution order so dependencies are laid down
    # before the packages that were resolved first.
    for pkg in reversed(resolved_packages):
        deb_file = package_to_deb_file_map.get(pkg)
        if deb_file and os.path.exists(deb_file):
            extract_deb_file(deb_file, tmp_dir, extract_dir, ar_tool)

    print("All done!")
|||
|
|||
def extract_deb_file(deb_file, tmp_dir, extract_dir, ar_tool):
    """Extract the data.tar.* payload of a .deb archive into extract_dir.

    Args:
        deb_file: Path to the .deb archive.
        tmp_dir: Scratch directory for intermediate files.
        extract_dir: Destination (rootfs) directory; created if missing.
        ar_tool: Name or path of the ar binary used to read the archive.

    Raises:
        FileNotFoundError: if the archive contains no data.tar.* member.
        ValueError: on an unsupported payload compression format.
    """
    os.makedirs(extract_dir, exist_ok=True)

    with tempfile.TemporaryDirectory(dir=tmp_dir) as tmp_subdir:
        # BUGFIX: use argument vectors instead of shell=True string
        # interpolation so paths containing spaces or shell metacharacters
        # cannot break (or inject into) the command line.
        result = subprocess.run(
            [ar_tool, "t", os.path.abspath(deb_file)],
            cwd=tmp_subdir, check=True,
            stdout=subprocess.PIPE, stderr=subprocess.PIPE)

        tar_filename = None
        for line in result.stdout.decode().splitlines():
            if line.startswith("data.tar"):
                tar_filename = line.strip()
                break

        if not tar_filename:
            raise FileNotFoundError(f"Could not find 'data.tar.*' in {deb_file}.")

        tar_file_path = os.path.join(tmp_subdir, tar_filename)
        print(f"Extracting {tar_filename} from {deb_file}..")

        # Equivalent of "ar p <deb> <member> > <file>" without a shell redirect.
        with open(tar_file_path, "wb") as tar_out:
            subprocess.run(
                [ar_tool, "p", os.path.abspath(deb_file), tar_filename],
                check=True, stdout=tar_out)

        file_extension = os.path.splitext(tar_file_path)[1].lower()

        if file_extension == ".xz":
            mode = "r:xz"
        elif file_extension == ".gz":
            mode = "r:gz"
        elif file_extension == ".zst":
            # zstd is not supported by the tarfile stdlib module, so
            # decompress to a plain tar first.
            decompressed_tar_path = tar_file_path.replace(".zst", "")
            with open(tar_file_path, "rb") as zst_file, open(decompressed_tar_path, "wb") as decompressed_file:
                dctx = zstandard.ZstdDecompressor()
                dctx.copy_stream(zst_file, decompressed_file)

            tar_file_path = decompressed_tar_path
            mode = "r"
        else:
            raise ValueError(f"Unsupported compression format: {file_extension}")

        with tarfile.open(tar_file_path, mode) as tar:
            # 'fully_trusted' preserves historical extraction behavior for
            # these distro-provided archives (tarfile filter API).
            tar.extractall(path=extract_dir, filter='fully_trusted')
|||
|
|||
def finalize_setup(rootfsdir):
    """Merge <rootfs>/lib into usr/lib and leave lib as a symlink to it."""
    lib_dir = os.path.join(rootfsdir, 'lib')
    usr_lib_dir = os.path.join(rootfsdir, 'usr', 'lib')

    if os.path.exists(lib_dir):
        if os.path.islink(lib_dir):
            # Already a symlink: drop it so it can be recreated below.
            os.remove(lib_dir)
        else:
            # Real directory: copy its contents under usr/lib, then remove it.
            os.makedirs(usr_lib_dir, exist_ok=True)

            for entry in os.listdir(lib_dir):
                source_path = os.path.join(lib_dir, entry)
                target_path = os.path.join(usr_lib_dir, entry)

                if os.path.isdir(source_path):
                    shutil.copytree(source_path, target_path, dirs_exist_ok=True)
                else:
                    shutil.copy2(source_path, target_path)

            shutil.rmtree(lib_dir)

    # Recreate lib as a symlink to usr/lib (usrmerge-style layout).
    os.symlink(usr_lib_dir, lib_dir)
|||
|
|||
if __name__ == "__main__":
    # CLI entry point: build a Debian/Ubuntu rootfs from package indices.
    parser = argparse.ArgumentParser(description="Generate rootfs for .NET runtime on Debian-like OS")
    parser.add_argument("--distro", required=False, help="Distro name (e.g., debian, ubuntu, etc.)")
    parser.add_argument("--arch", required=True, help="Architecture (e.g., amd64, loong64, etc.)")
    parser.add_argument("--rootfsdir", required=True, help="Destination directory.")
    parser.add_argument('--suite', required=True, action='append', help='Specify one or more repository suites to collect index data.')
    parser.add_argument("--mirror", required=False, help="Mirror (e.g., http://ftp.debian.org/debian-ports etc.)")
    parser.add_argument("--artool", required=False, default="ar", help="ar tool to extract debs (e.g., ar, llvm-ar etc.)")
    parser.add_argument("packages", nargs="+", help="List of package names to be installed.")

    args = parser.parse_args()

    # Pick a default mirror from the distro/arch when none was given
    # (amd64/i386 live on the main Ubuntu archive; other arches on ports).
    if args.mirror is None:
        if args.distro == "ubuntu":
            args.mirror = "http://archive.ubuntu.com/ubuntu" if args.arch in ["amd64", "i386"] else "http://ports.ubuntu.com/ubuntu-ports"
        elif args.distro == "debian":
            args.mirror = "http://ftp.debian.org/debian-ports"
        else:
            raise Exception("Unsupported distro")

    # Always add the minimal set of packages a usable rootfs needs.
    DESIRED_PACKAGES = args.packages + [ # base packages
        "dpkg",
        "busybox",
        "libc-bin",
        "base-files",
        "base-passwd",
        "debianutils"
    ]

    print(f"Creating rootfs. rootfsdir: {args.rootfsdir}, distro: {args.distro}, arch: {args.arch}, suites: {args.suite}, mirror: {args.mirror}")

    package_index_content = asyncio.run(download_package_index_parallel(args.mirror, args.arch, args.suite))

    packages_info, aliases = parse_package_index(package_index_content)

    # Download and unpack everything into the rootfs, then normalize the
    # lib -> usr/lib layout.
    with tempfile.TemporaryDirectory() as tmp_dir:
        install_packages(args.mirror, packages_info, aliases, tmp_dir, args.rootfsdir, args.artool, DESIRED_PACKAGES)

    finalize_setup(args.rootfsdir)
|||
@ -0,0 +1,7 @@ |
|||
@echo off

:: This script is used to install the .NET SDK.
:: It will also invoke the SDK with any provided arguments.

:: Delegate to the PowerShell implementation next to this script,
:: forwarding all arguments, and propagate its exit code.
powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0dotnet.ps1""" %*"
exit /b %ErrorLevel%
|||
@ -0,0 +1,11 @@ |
|||
# This script is used to install the .NET SDK.
# It will also invoke the SDK with any provided arguments.

. $PSScriptRoot\tools.ps1
# InitializeDotNetCli (from tools.ps1) installs the SDK when -install is set
# and returns the dotnet root directory.
$dotnetRoot = InitializeDotNetCli -install:$true

# Invoke acquired SDK with args if they are provided
if ($args.count -gt 0) {
  # DOTNET_NOLOGO suppresses the first-run banner in the forwarded call.
  $env:DOTNET_NOLOGO=1
  & "$dotnetRoot\dotnet.exe" $args
}
|||
@ -0,0 +1,26 @@ |
|||
#!/usr/bin/env bash

# This script is used to install the .NET SDK.
# It will also invoke the SDK with any provided arguments.

source="${BASH_SOURCE[0]}"
# resolve $source until the file is no longer a symlink
while [[ -h $source ]]; do
  scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
  source="$(readlink "$source")"

  # if $source was a relative symlink, we need to resolve it relative to the path where the
  # symlink file was located
  [[ $source != /* ]] && source="$scriptroot/$source"
done
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"

# tools.sh provides InitializeDotNetCli and sets _InitializeDotNetCli.
# Quote the path in case the checkout directory contains spaces.
source "$scriptroot/tools.sh"
InitializeDotNetCli true # install

# Invoke acquired SDK with args if they are provided
if (( $# > 0 )); then   # arithmetic test: [[ $# > 0 ]] compared lexically
  __dotnetDir=${_InitializeDotNetCli}
  dotnetPath="${__dotnetDir}/dotnet"
  # Quote the binary path too: the install root may contain spaces.
  "${dotnetPath}" "$@"
fi
|||
@ -0,0 +1,62 @@ |
|||
#!/bin/sh

set -e

# This is a simple script primarily used for CI to install necessary dependencies
#
# Usage:
#
# ./install-dependencies.sh <OS>

# Normalize the OS argument to lowercase.
os="$(echo "$1" | tr "[:upper:]" "[:lower:]")"

# Fall back to auto-detection when no OS was passed on the command line.
if [ -z "$os" ]; then
  . "$(dirname "$0")"/init-os-and-arch.sh
fi

case "$os" in
  linux)
    # /etc/os-release provides $ID / $ID_LIKE for distro detection below.
    if [ -e /etc/os-release ]; then
      . /etc/os-release
    fi

    if [ "$ID" = "debian" ] || [ "$ID_LIKE" = "debian" ]; then
      apt update

      apt install -y build-essential gettext locales cmake llvm clang lld lldb liblldb-dev libunwind8-dev libicu-dev liblttng-ust-dev \
        libssl-dev libkrb5-dev pigz cpio

      # Generate the en_US.UTF-8 locale expected by the build.
      localedef -i en_US -c -f UTF-8 -A /usr/share/locale/locale.alias en_US.UTF-8
    elif [ "$ID" = "fedora" ] || [ "$ID" = "rhel" ] || [ "$ID" = "azurelinux" ]; then
      # azurelinux ships tdnf; the other distros use dnf.
      pkg_mgr="$(command -v tdnf 2>/dev/null || command -v dnf)"
      $pkg_mgr install -y cmake llvm lld lldb clang python curl libicu-devel openssl-devel krb5-devel lttng-ust-devel pigz cpio
    elif [ "$ID" = "alpine" ]; then
      apk add build-base cmake bash curl clang llvm-dev lld lldb krb5-dev lttng-ust-dev icu-dev openssl-dev pigz cpio
    else
      echo "Unsupported distro. distro: $ID"
      exit 1
    fi
    ;;

  osx|maccatalyst|ios|iossimulator|tvos|tvossimulator)
    echo "Installed xcode version: $(xcode-select -p)"

    export HOMEBREW_NO_INSTALL_CLEANUP=1
    export HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK=1
    # Skip brew update for now, see https://github.com/actions/setup-python/issues/577
    # brew update --preinstall
    brew bundle --no-upgrade --file=- <<EOF
brew "cmake"
brew "icu4c"
brew "openssl@3"
brew "pkgconf"
brew "python3"
brew "pigz"
EOF
    ;;

  *)
    echo "Unsupported platform. OS: $os"
    exit 1
    ;;
esac
|||
@ -0,0 +1,121 @@ |
|||
#!/usr/bin/env bash |
|||
|
|||
show_usage() {
  # Print CLI help for this script to stdout.
  printf '%s\n' \
    "Common settings:" \
    "  --task <value>  Name of Arcade task (name of a project in SdkTasks directory of the Arcade SDK package)" \
    "  --restore  Restore dependencies" \
    "  --verbosity <value>  Msbuild verbosity: q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic]" \
    "  --help  Print help and exit" \
    ""

  printf '%s\n' \
    "Advanced settings:" \
    "  --excludeCIBinarylog  Don't output binary log (short: -nobl)" \
    "  --noWarnAsError  Do not warn as error" \
    "" \
    "Command line arguments not listed above are passed thru to msbuild."
}
|||
|
|||
source="${BASH_SOURCE[0]}"

# resolve $source until the file is no longer a symlink so that $scriptroot
# always points at the real on-disk location of this script.
while [[ -h "$source" ]]; do
  scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
  source="$(readlink "$source")"
  # if $source was a relative symlink, we need to resolve it relative to the path where the
  # symlink file was located
  [[ $source != /* ]] && source="$scriptroot/$source"
done
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
|||
|
|||
Build() {
  # Invoke MSBuild on the resolved SDK task project for a single target.
  # Arguments:
  #   $1 - MSBuild target to run (e.g. Restore, Execute)
  # Globals read: task, log_dir, binary_log, toolset_dir, taskProject,
  #   configuration, repo_root, verbosity, properties
  local target=$1
  local log_suffix=""
  # Only suffix the log name for non-default targets so the Execute run
  # keeps the plain "<task>.binlog" name.
  [[ "$target" != "Execute" ]] && log_suffix=".$target"
  local log="$log_dir/$task$log_suffix.binlog"
  local binaryLogArg=""
  [[ $binary_log == true ]] && binaryLogArg="/bl:$log"
  local output_path="$toolset_dir/$task/"

  # NOTE(review): $binaryLogArg and $properties are intentionally unquoted —
  # an empty value must drop out entirely and $properties must word-split
  # into separate msbuild arguments.
  MSBuild "$taskProject" \
    $binaryLogArg \
    /t:"$target" \
    /p:Configuration="$configuration" \
    /p:RepoRoot="$repo_root" \
    /p:BaseIntermediateOutputPath="$output_path" \
    /v:"$verbosity" \
    $properties
}
|||
|
|||
# Defaults for all settings consumed by this script.
binary_log=true
configuration="Debug"
verbosity="minimal"
exclude_ci_binary_log=false
restore=false
help=false
properties=''
warnAsError=true

# Parse command-line options into the globals above. Unrecognized arguments
# accumulate in $properties and are passed through to msbuild.
parse_args() {
  while (($# > 0)); do
    # Options are matched case-insensitively by lowercasing them first.
    local opt="$(echo "$1" | tr "[:upper:]" "[:lower:]")"
    case $opt in
      --task)
        task=$2
        shift 2
        ;;
      --restore)
        restore=true
        shift 1
        ;;
      --verbosity)
        verbosity=$2
        shift 2
        ;;
      --excludecibinarylog|--nobl)
        binary_log=false
        exclude_ci_binary_log=true
        shift 1
        ;;
      --nowarnaserror)
        # BUGFIX: this pattern must be lowercase; the previous arm
        # '--noWarnAsError)' could never match the lowercased input.
        warnAsError=false
        shift 1
        ;;
      --help)
        help=true
        shift 1
        ;;
      *)
        properties="$properties $1"
        shift 1
        ;;
    esac
  done
}

parse_args "$@"
|||
|
|||
# This script always runs in CI mode (consumed by tools.sh).
ci=true

if $help; then
  show_usage
  exit 0
fi

# tools.sh provides InitializeToolset, MSBuild, GetSdkTaskProject,
# Write-PipelineTelemetryError and ExitWithExitCode.
. "$scriptroot/tools.sh"
InitializeToolset

if [[ -z "$task" ]]; then
  Write-PipelineTelemetryError -Category 'Task' -Name 'MissingTask' -Message "Missing required parameter '-task <value>'"
  ExitWithExitCode 1
fi

# Resolve the task name to a project file inside the Arcade SDK package.
taskProject=$(GetSdkTaskProject "$task")
if [[ ! -e "$taskProject" ]]; then
  Write-PipelineTelemetryError -Category 'Task' -Name 'UnknownTask' -Message "Unknown task: $task"
  ExitWithExitCode 1
fi

# Optionally restore first, then run the task's Execute target.
if $restore; then
  Build "Restore"
fi

Build "Execute"


ExitWithExitCode 0
|||
@ -1,4 +1,4 @@ |
|||
<?xml version="1.0" encoding="utf-8"?> |
|||
<packages> |
|||
<package id="Microsoft.Guardian.Cli" version="0.109.0"/> |
|||
<package id="Microsoft.Guardian.Cli" version="0.199.0"/> |
|||
</packages> |
|||
|
|||
@ -0,0 +1,7 @@ |
|||
steps: |
|||
- template: /eng/common/core-templates/steps/source-index-stage1-publish.yml |
|||
parameters: |
|||
is1ESPipeline: true |
|||
|
|||
${{ each parameter in parameters }}: |
|||
${{ parameter.key }}: ${{ parameter.value }} |
|||
@ -0,0 +1,7 @@ |
|||
steps: |
|||
- template: /eng/common/core-templates/steps/source-index-stage1-publish.yml |
|||
parameters: |
|||
is1ESPipeline: false |
|||
|
|||
${{ each parameter in parameters }}: |
|||
${{ parameter.key }}: ${{ parameter.value }} |
|||
@ -0,0 +1,207 @@ |
|||
### These steps synchronize new code from product repositories into the VMR (https://github.com/dotnet/dotnet). |
|||
### They initialize the darc CLI and pull the new updates. |
|||
### Changes are applied locally onto the already cloned VMR (located in $vmrPath). |
|||
|
|||
parameters: |
|||
- name: targetRef |
|||
displayName: Target revision in dotnet/<repo> to synchronize |
|||
type: string |
|||
default: $(Build.SourceVersion) |
|||
|
|||
- name: vmrPath |
|||
displayName: Path where the dotnet/dotnet is checked out to |
|||
type: string |
|||
default: $(Agent.BuildDirectory)/vmr |
|||
|
|||
- name: additionalSyncs |
|||
displayName: Optional list of package names whose repo's source will also be synchronized in the local VMR, e.g. NuGet.Protocol |
|||
type: object |
|||
default: [] |
|||
|
|||
steps: |
|||
- checkout: vmr |
|||
displayName: Clone dotnet/dotnet |
|||
path: vmr |
|||
clean: true |
|||
|
|||
- checkout: self |
|||
displayName: Clone $(Build.Repository.Name) |
|||
path: repo |
|||
fetchDepth: 0 |
|||
|
|||
# This step is needed so that when we get a detached HEAD / shallow clone, |
|||
# we still pull the commit into the temporary repo clone to use it during the sync. |
|||
# Also unshallow the clone so that forwardflow command would work. |
|||
- script: | |
|||
git branch repo-head |
|||
git rev-parse HEAD |
|||
displayName: Label PR commit |
|||
workingDirectory: $(Agent.BuildDirectory)/repo |
|||
|
|||
- script: | |
|||
vmr_sha=$(grep -oP '(?<=Sha=")[^"]*' $(Agent.BuildDirectory)/repo/eng/Version.Details.xml) |
|||
echo "##vso[task.setvariable variable=vmr_sha]$vmr_sha" |
|||
displayName: Obtain the vmr sha from Version.Details.xml (Unix) |
|||
condition: ne(variables['Agent.OS'], 'Windows_NT') |
|||
workingDirectory: $(Agent.BuildDirectory)/repo |
|||
|
|||
- powershell: | |
|||
[xml]$xml = Get-Content -Path $(Agent.BuildDirectory)/repo/eng/Version.Details.xml |
|||
$vmr_sha = $xml.SelectSingleNode("//Source").Sha |
|||
Write-Output "##vso[task.setvariable variable=vmr_sha]$vmr_sha" |
|||
displayName: Obtain the vmr sha from Version.Details.xml (Windows) |
|||
condition: eq(variables['Agent.OS'], 'Windows_NT') |
|||
workingDirectory: $(Agent.BuildDirectory)/repo |
|||
|
|||
- script: | |
|||
git fetch --all |
|||
git checkout $(vmr_sha) |
|||
displayName: Checkout VMR at correct sha for repo flow |
|||
workingDirectory: ${{ parameters.vmrPath }} |
|||
|
|||
- script: | |
|||
git config --global user.name "dotnet-maestro[bot]" |
|||
git config --global user.email "dotnet-maestro[bot]@users.noreply.github.com" |
|||
displayName: Set git author to dotnet-maestro[bot] |
|||
workingDirectory: ${{ parameters.vmrPath }} |
|||
|
|||
- script: | |
|||
./eng/common/vmr-sync.sh \ |
|||
--vmr ${{ parameters.vmrPath }} \ |
|||
--tmp $(Agent.TempDirectory) \ |
|||
--azdev-pat '$(dn-bot-all-orgs-code-r)' \ |
|||
--ci \ |
|||
--debug |
|||
|
|||
if [ "$?" -ne 0 ]; then |
|||
echo "##vso[task.logissue type=error]Failed to synchronize the VMR" |
|||
exit 1 |
|||
fi |
|||
displayName: Sync repo into VMR (Unix) |
|||
condition: ne(variables['Agent.OS'], 'Windows_NT') |
|||
workingDirectory: $(Agent.BuildDirectory)/repo |
|||
|
|||
- script: | |
|||
git config --global diff.astextplain.textconv echo |
|||
git config --system core.longpaths true |
|||
displayName: Configure Windows git (longpaths, astextplain) |
|||
condition: eq(variables['Agent.OS'], 'Windows_NT') |
|||
|
|||
- powershell: | |
|||
./eng/common/vmr-sync.ps1 ` |
|||
-vmr ${{ parameters.vmrPath }} ` |
|||
-tmp $(Agent.TempDirectory) ` |
|||
-azdevPat '$(dn-bot-all-orgs-code-r)' ` |
|||
-ci ` |
|||
-debugOutput |
|||
|
|||
if ($LASTEXITCODE -ne 0) { |
|||
echo "##vso[task.logissue type=error]Failed to synchronize the VMR" |
|||
exit 1 |
|||
} |
|||
displayName: Sync repo into VMR (Windows) |
|||
condition: eq(variables['Agent.OS'], 'Windows_NT') |
|||
workingDirectory: $(Agent.BuildDirectory)/repo |
|||
|
|||
- ${{ if eq(variables['Build.Reason'], 'PullRequest') }}: |
|||
- task: CopyFiles@2 |
|||
displayName: Collect failed patches |
|||
condition: failed() |
|||
inputs: |
|||
SourceFolder: '$(Agent.TempDirectory)' |
|||
Contents: '*.patch' |
|||
TargetFolder: '$(Build.ArtifactStagingDirectory)/FailedPatches' |
|||
|
|||
- publish: '$(Build.ArtifactStagingDirectory)/FailedPatches' |
|||
artifact: $(System.JobDisplayName)_FailedPatches |
|||
displayName: Upload failed patches |
|||
condition: failed() |
|||
|
|||
- ${{ each assetName in parameters.additionalSyncs }}: |
|||
# The vmr-sync script ends up staging files in the local VMR so we have to commit those |
|||
- script: |
|||
git commit --allow-empty -am "Forward-flow $(Build.Repository.Name)" |
|||
displayName: Commit local VMR changes |
|||
workingDirectory: ${{ parameters.vmrPath }} |
|||
|
|||
- script: | |
|||
set -ex |
|||
|
|||
echo "Searching for details of asset ${{ assetName }}..." |
|||
|
|||
# Use darc to get dependencies information |
|||
dependencies=$(./.dotnet/dotnet darc get-dependencies --name '${{ assetName }}' --ci) |
|||
|
|||
# Extract repository URL and commit hash |
|||
repository=$(echo "$dependencies" | grep 'Repo:' | sed 's/Repo:[[:space:]]*//' | head -1) |
|||
|
|||
if [ -z "$repository" ]; then |
|||
echo "##vso[task.logissue type=error]Asset ${{ assetName }} not found in the dependency list" |
|||
exit 1 |
|||
fi |
|||
|
|||
commit=$(echo "$dependencies" | grep 'Commit:' | sed 's/Commit:[[:space:]]*//' | head -1) |
|||
|
|||
echo "Updating the VMR from $repository / $commit..." |
|||
cd .. |
|||
git clone $repository ${{ assetName }} |
|||
cd ${{ assetName }} |
|||
git checkout $commit |
|||
git branch "sync/$commit" |
|||
|
|||
./eng/common/vmr-sync.sh \ |
|||
--vmr ${{ parameters.vmrPath }} \ |
|||
--tmp $(Agent.TempDirectory) \ |
|||
--azdev-pat '$(dn-bot-all-orgs-code-r)' \ |
|||
--ci \ |
|||
--debug |
|||
|
|||
if [ "$?" -ne 0 ]; then |
|||
echo "##vso[task.logissue type=error]Failed to synchronize the VMR" |
|||
exit 1 |
|||
fi |
|||
displayName: Sync ${{ assetName }} into (Unix) |
|||
condition: ne(variables['Agent.OS'], 'Windows_NT') |
|||
workingDirectory: $(Agent.BuildDirectory)/repo |
|||
|
|||
- powershell: | |
|||
$ErrorActionPreference = 'Stop' |
|||
|
|||
Write-Host "Searching for details of asset ${{ assetName }}..." |
|||
|
|||
$dependencies = .\.dotnet\dotnet darc get-dependencies --name '${{ assetName }}' --ci |
|||
|
|||
$repository = $dependencies | Select-String -Pattern 'Repo:\s+([^\s]+)' | Select-Object -First 1 |
|||
$repository -match 'Repo:\s+([^\s]+)' | Out-Null |
|||
$repository = $matches[1] |
|||
|
|||
if ($repository -eq $null) { |
|||
Write-Error "Asset ${{ assetName }} not found in the dependency list" |
|||
exit 1 |
|||
} |
|||
|
|||
$commit = $dependencies | Select-String -Pattern 'Commit:\s+([^\s]+)' | Select-Object -First 1 |
|||
$commit -match 'Commit:\s+([^\s]+)' | Out-Null |
|||
$commit = $matches[1] |
|||
|
|||
Write-Host "Updating the VMR from $repository / $commit..." |
|||
cd .. |
|||
git clone $repository ${{ assetName }} |
|||
cd ${{ assetName }} |
|||
git checkout $commit |
|||
git branch "sync/$commit" |
|||
|
|||
.\eng\common\vmr-sync.ps1 ` |
|||
-vmr ${{ parameters.vmrPath }} ` |
|||
-tmp $(Agent.TempDirectory) ` |
|||
-azdevPat '$(dn-bot-all-orgs-code-r)' ` |
|||
-ci ` |
|||
-debugOutput |
|||
|
|||
if ($LASTEXITCODE -ne 0) { |
|||
echo "##vso[task.logissue type=error]Failed to synchronize the VMR" |
|||
exit 1 |
|||
} |
|||
displayName: Sync ${{ assetName }} into (Windows) |
|||
condition: eq(variables['Agent.OS'], 'Windows_NT')
|||
workingDirectory: $(Agent.BuildDirectory)/repo |
|||
@ -0,0 +1,42 @@ |
|||
# This pipeline is used for running the VMR verification of the PR changes in repo-level PRs. |
|||
# |
|||
# It will run a full set of verification jobs defined in: |
|||
# https://github.com/dotnet/dotnet/blob/10060d128e3f470e77265f8490f5e4f72dae738e/eng/pipelines/templates/stages/vmr-build.yml#L27-L38 |
|||
# |
|||
# For repos that do not need to run the full set, you would do the following: |
|||
# |
|||
# 1. Copy this YML file to a repo-specific location, i.e. outside of eng/common. |
|||
# |
|||
# 2. Add `verifications` parameter to VMR template reference |
|||
# |
|||
# Examples: |
|||
# - For source-build stage 1 verification, add the following: |
|||
# verifications: [ "source-build-stage1" ] |
|||
# |
|||
# - For Windows only verifications, add the following: |
|||
# verifications: [ "unified-build-windows-x64", "unified-build-windows-x86" ] |
|||
|
|||
trigger: none |
|||
pr: none |
|||
|
|||
variables: |
|||
- template: /eng/common/templates/variables/pool-providers.yml@self |
|||
|
|||
- name: skipComponentGovernanceDetection # we run CG on internal builds only |
|||
value: true |
|||
|
|||
- name: Codeql.Enabled # we run CodeQL on internal builds only |
|||
value: false |
|||
|
|||
resources: |
|||
repositories: |
|||
- repository: vmr |
|||
type: github |
|||
name: dotnet/dotnet |
|||
endpoint: dotnet |
|||
|
|||
stages: |
|||
- template: /eng/pipelines/templates/stages/vmr-build.yml@vmr |
|||
parameters: |
|||
isBuiltFromVmr: false |
|||
scope: lite |
|||
@ -0,0 +1,138 @@ |
|||
<# |
|||
.SYNOPSIS |
|||
|
|||
This script is used for synchronizing the current repository into a local VMR. |
|||
It pulls the current repository's code into the specified VMR directory for local testing or |
|||
Source-Build validation. |
|||
|
|||
.DESCRIPTION |
|||
|
|||
The tooling used for synchronization will clone the VMR repository into a temporary folder if |
|||
it does not already exist. These clones can be reused in future synchronizations, so it is |
|||
recommended to dedicate a folder for this to speed up re-runs. |
|||
|
|||
.EXAMPLE |
|||
Synchronize current repository into a local VMR: |
|||
./vmr-sync.ps1 -vmrDir "$HOME/repos/dotnet" -tmpDir "$HOME/repos/tmp" |
|||
|
|||
.PARAMETER tmpDir |
|||
Required. Path to the temporary folder where repositories will be cloned |
|||
|
|||
.PARAMETER vmrBranch |
|||
Optional. Branch of the 'dotnet/dotnet' repo to synchronize. The VMR will be checked out to this branch |
|||
|
|||
.PARAMETER azdevPat |
|||
Optional. Azure DevOps PAT to use for cloning private repositories. |
|||
|
|||
.PARAMETER vmrDir |
|||
Optional. Path to the dotnet/dotnet repository. When null, gets cloned to the temporary folder |
|||
|
|||
.PARAMETER debugOutput |
|||
Optional. Enables debug logging in the darc vmr command. |
|||
|
|||
.PARAMETER ci |
|||
Optional. Denotes that the script is running in a CI environment. |
|||
#> |
|||
param (
  [Parameter(Mandatory=$true, HelpMessage="Path to the temporary folder where repositories will be cloned")]
  [string][Alias('t', 'tmp')]$tmpDir,
  [string][Alias('b', 'branch')]$vmrBranch,
  # NOTE(review): $remote is accepted but never referenced later in this script — confirm
  # whether it should be forwarded to darc (the bash counterpart passes --additional-remotes).
  [string]$remote,
  [string]$azdevPat,
  [string][Alias('v', 'vmr')]$vmrDir,
  [switch]$ci,
  [switch]$debugOutput
)
|||
|
|||
# Print an error-styled message to the host: '> ' prefix, red foreground.
# Note: this only reports; it does not terminate the script.
function Fail {
  param([string]$Message)
  Write-Host "> $Message" -ForegroundColor Red
}
|||
|
|||
# Print an informational message to the host: '> ' prefix, cyan foreground.
function Highlight {
  param([string]$Message)
  Write-Host "> $Message" -ForegroundColor Cyan
}
|||
|
|||
# Map the -debugOutput switch onto darc's verbosity levels.
$verbosity = 'verbose'
if ($debugOutput) {
  $verbosity = 'debug'
}
# Validation

if (-not $tmpDir) {
  Fail "Missing -tmpDir argument. Please specify the path to the temporary folder where the repositories will be cloned"
  exit 1
}

# Sanitize the input

# Default the VMR location to <tmpDir>/dotnet when not provided.
if (-not $vmrDir) {
  $vmrDir = Join-Path $tmpDir 'dotnet'
}

# Make sure the temporary folder exists before anything tries to clone into it.
if (-not (Test-Path -Path $tmpDir -PathType Container)) {
  New-Item -ItemType Directory -Path $tmpDir | Out-Null
}
|||
|
|||
# Prepare the VMR: clone it fresh, or validate/refresh an existing checkout.
if (-not (Test-Path -Path $vmrDir -PathType Container)) {
  Highlight "Cloning 'dotnet/dotnet' into $vmrDir.."
  git clone https://github.com/dotnet/dotnet $vmrDir

  if ($vmrBranch) {
    git -C $vmrDir switch -c $vmrBranch
  }
}
else {
  # 'git diff --quiet' reports a dirty working tree through its exit code and
  # produces no output. Comparing the (empty) output to $false was always false,
  # so the guard never fired — check $LASTEXITCODE instead (matches vmr-sync.sh).
  git -C $vmrDir diff --quiet
  if ($LASTEXITCODE -ne 0) {
    Fail "There are changes in the working tree of $vmrDir. Please commit or stash your changes"
    exit 1
  }

  if ($vmrBranch) {
    Highlight "Preparing $vmrDir"
    git -C $vmrDir checkout $vmrBranch
    git -C $vmrDir pull
  }
}
|||
|
|||
Set-StrictMode -Version Latest

# Prepare darc

Highlight 'Installing .NET, preparing the tooling..'
# NOTE(review): tools.ps1 is expected to provide InitializeDotNetCli and Get-Darc — confirm.
. .\eng\common\tools.ps1
$dotnetRoot = InitializeDotNetCli -install:$true
$darc = Get-Darc
# NOTE(review): $dotnet is computed but not used below — possibly kept for parity
# with vmr-sync.sh; confirm before removing.
$dotnet = "$dotnetRoot\dotnet.exe"

Highlight "Starting the synchronization of VMR.."

# Synchronize the VMR
# Build the argument list as an array so each element reaches darc as a separate argument.
$darcArgs = (
  "vmr", "forwardflow",
  "--tmp", $tmpDir,
  "--$verbosity",
  $vmrDir
)

if ($ci) {
  $darcArgs += ("--ci")
}

if ($azdevPat) {
  $darcArgs += ("--azdev-pat", $azdevPat)
}

& "$darc" $darcArgs

# darc's exit code decides success; on failure, leave the VMR as-is and explain recovery.
if ($LASTEXITCODE -eq 0) {
  Highlight "Synchronization succeeded"
}
else {
  Fail "Synchronization of repo to VMR failed!"
  Fail "'$vmrDir' is left in its last state (re-run of this script will reset it)."
  Fail "Please inspect the logs which contain path to the failing patch file (use -debugOutput to get all the details)."
  Fail "Once you make changes to the conflicting VMR patch, commit it locally and re-run this script."
  exit 1
}
|||
@ -0,0 +1,207 @@ |
|||
#!/bin/bash |
|||
|
|||
### This script is used for synchronizing the current repository into a local VMR. |
|||
### It pulls the current repository's code into the specified VMR directory for local testing or |
|||
### Source-Build validation. |
|||
### |
|||
### The tooling used for synchronization will clone the VMR repository into a temporary folder if |
|||
### it does not already exist. These clones can be reused in future synchronizations, so it is |
|||
### recommended to dedicate a folder for this to speed up re-runs. |
|||
### |
|||
### USAGE: |
|||
### Synchronize current repository into a local VMR: |
|||
### ./vmr-sync.sh --tmp "$HOME/repos/tmp" "$HOME/repos/dotnet" |
|||
### |
|||
### Options: |
|||
### -t, --tmp, --tmp-dir PATH |
|||
### Required. Path to the temporary folder where repositories will be cloned |
|||
### |
|||
### -b, --branch, --vmr-branch BRANCH_NAME |
|||
### Optional. Branch of the 'dotnet/dotnet' repo to synchronize. The VMR will be checked out to this branch |
|||
### |
|||
### --debug |
|||
### Optional. Turns on the most verbose logging for the VMR tooling |
|||
### |
|||
### --remote name:URI |
|||
### Optional. Additional remote to use during the synchronization |
|||
### This can be used to synchronize to a commit from a fork of the repository |
|||
### Example: 'runtime:https://github.com/yourfork/runtime' |
|||
### |
|||
### --azdev-pat |
|||
### Optional. Azure DevOps PAT to use for cloning private repositories. |
|||
### |
|||
### -v, --vmr, --vmr-dir PATH |
|||
### Optional. Path to the dotnet/dotnet repository. When null, gets cloned to the temporary folder |
|||
|
|||
# Resolve the real on-disk location of this script (following symlinks) so that
# $scriptroot points at the directory containing the actual file.
source="${BASH_SOURCE[0]}"

# resolve $source until the file is no longer a symlink
while [[ -h "$source" ]]; do
  scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
  source="$(readlink "$source")"
  # if $source was a relative symlink, we need to resolve it relative to the path where the
  # symlink file was located
  [[ $source != /* ]] && source="$scriptroot/$source"
done
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
|||
|
|||
# Print the usage text: the '### '-prefixed doc-comment block at the top of this
# script (from the first '### ' line through the next blank line), with the
# leading '### ' marker stripped off each line.
function print_help () {
  sed -n '/^### /,/^$/p' "$source" | cut -b 5-
}
|||
|
|||
# Terminal colors; fall back to empty strings when tput/terminfo is unavailable (e.g. CI logs).
COLOR_RED=$(tput setaf 1 2>/dev/null || true)
COLOR_CYAN=$(tput setaf 6 2>/dev/null || true)
COLOR_CLEAR=$(tput sgr0 2>/dev/null || true)
# Sentinel token callers may embed inside messages; fail()/highlight() replace it
# with their own color so embedded fragments revert to the message color.
COLOR_RESET=uniquesearchablestring
FAILURE_PREFIX='> '
|||
|
|||
# Print an error message to stderr, colored red and prefixed with $FAILURE_PREFIX.
# Any COLOR_RESET sentinel embedded in the message is re-colored red.
fail() {
  local message="${1//${COLOR_RESET}/${COLOR_RED}}"
  printf '%s\n' "${COLOR_RED}${FAILURE_PREFIX}${message}${COLOR_CLEAR}" >&2
}
|||
|
|||
# Print an informational message to stdout, colored cyan and prefixed with $FAILURE_PREFIX.
# Any COLOR_RESET sentinel embedded in the message is re-colored cyan.
highlight() {
  local message="${1//${COLOR_RESET}/${COLOR_CYAN}}"
  printf '%s\n' "${COLOR_CYAN}${FAILURE_PREFIX}${message}${COLOR_CLEAR}"
}
|||
|
|||
# Option defaults; overridden by the command-line flags parsed below.
tmp_dir=''
vmr_dir=''
vmr_branch=''
additional_remotes=''
verbosity=verbose
azdev_pat=''
ci=false
|||
|
|||
# Parse command-line options. Value-taking options do an inner 'shift' for the
# value; the trailing 'shift' after the case consumes the option itself.
while [[ $# -gt 0 ]]; do
  # Compare option names case-insensitively.
  opt="$(echo "$1" | tr "[:upper:]" "[:lower:]")"
  case "$opt" in
    -t|--tmp|--tmp-dir)
      tmp_dir=$2
      shift
      ;;
    -v|--vmr|--vmr-dir)
      vmr_dir=$2
      shift
      ;;
    -b|--branch|--vmr-branch)
      vmr_branch=$2
      shift
      ;;
    --remote)
      # May be given multiple times; remotes accumulate space-separated.
      additional_remotes="$additional_remotes $2"
      shift
      ;;
    --azdev-pat)
      azdev_pat=$2
      shift
      ;;
    --ci)
      ci=true
      ;;
    -d|--debug)
      verbosity=debug
      ;;
    -h|--help)
      print_help
      exit 0
      ;;
    *)
      fail "Invalid argument: $1"
      print_help
      exit 1
      ;;
  esac

  shift
done
|||
|
|||
# Validation

if [[ -z "$tmp_dir" ]]; then
  fail "Missing --tmp-dir argument. Please specify the path to the temporary folder where the repositories will be cloned"
  exit 1
fi

# Sanitize the input

# Default the VMR location to <tmp dir>/dotnet when not provided.
if [[ -z "$vmr_dir" ]]; then
  vmr_dir="$tmp_dir/dotnet"
fi

if [[ ! -d "$tmp_dir" ]]; then
  mkdir -p "$tmp_dir"
fi

# Trace every command when --debug was requested.
if [[ "$verbosity" == "debug" ]]; then
  set -x
fi

# Prepare the VMR

if [[ ! -d "$vmr_dir" ]]; then
  highlight "Cloning 'dotnet/dotnet' into $vmr_dir.."
  git clone https://github.com/dotnet/dotnet "$vmr_dir"

  if [[ -n "$vmr_branch" ]]; then
    git -C "$vmr_dir" switch -c "$vmr_branch"
  fi
else
  # Refuse to run against a dirty working tree — the sync would clobber local edits.
  if ! git -C "$vmr_dir" diff --quiet; then
    fail "There are changes in the working tree of $vmr_dir. Please commit or stash your changes"
    exit 1
  fi

  if [[ -n "$vmr_branch" ]]; then
    highlight "Preparing $vmr_dir"
    git -C "$vmr_dir" checkout "$vmr_branch"
    git -C "$vmr_dir" pull
  fi
fi

set -e

# Prepare darc

highlight 'Installing .NET, preparing the tooling..'
# NOTE(review): tools.sh is expected to provide InitializeDotNetCli and GetDarc,
# and GetDarc presumably sets the path used as "$darc_tool" below — confirm.
source "./eng/common/tools.sh"
InitializeDotNetCli true
GetDarc
dotnetDir=$( cd ./.dotnet/; pwd -P )
# NOTE(review): $dotnet is assigned but not referenced below — possibly used by
# sourced helpers or kept for parity with vmr-sync.ps1; confirm before removing.
dotnet=$dotnetDir/dotnet

highlight "Starting the synchronization of VMR.."
# Disable exit-on-error so a darc failure is reported with the friendly messages below.
set +e

# Convert the optional values into whole darc CLI token strings (empty when unused).
if [[ -n "$additional_remotes" ]]; then
  additional_remotes="--additional-remotes $additional_remotes"
fi

if [[ -n "$azdev_pat" ]]; then
  azdev_pat="--azdev-pat $azdev_pat"
fi

ci_arg=''
if [[ "$ci" == "true" ]]; then
  ci_arg="--ci"
fi

# Synchronize the VMR

export DOTNET_ROOT="$dotnetDir"

# The unquoted expansions below are intentional: each variable holds zero or more
# whole CLI tokens (flag plus value) that must word-split into separate arguments.
"$darc_tool" vmr forwardflow \
  --tmp "$tmp_dir" \
  $azdev_pat \
  --$verbosity \
  $ci_arg \
  $additional_remotes \
  "$vmr_dir"

if [[ $? == 0 ]]; then
  highlight "Synchronization succeeded"
else
  fail "Synchronization of repo to VMR failed!"
  fail "'$vmr_dir' is left in its last state (re-run of this script will reset it)."
  fail "Please inspect the logs which contain path to the failing patch file (use --debug to get all the details)."
  fail "Once you make changes to the conflicting VMR patch, commit it locally and re-run this script."
  exit 1
fi
|||
Loading…
Reference in new issue