From 9a57f1b8f6123716135136aaaa3bd95c194ac740 Mon Sep 17 00:00:00 2001 From: "Ihor.Zhuravlov" Date: Fri, 3 Jan 2025 16:18:57 +0100 Subject: [PATCH 1/5] Updated .NET6 to .NET8, .net framework 4.6 to 4.8, and dependencies. --- .vscode/settings.json | 5 +++ NuGet.config | 2 +- README.md | 13 ++++--- azure-pipelines-e2e-tests-template.yml | 6 +-- benchmark/README.md | 2 +- benchmark/csharp/Tpch/Tpch.csproj | 6 +-- deployment/README.md | 6 +-- docs/building/ubuntu-instructions.md | 37 ++++++++++--------- docs/building/windows-instructions.md | 37 ++++++++++--------- docs/getting-started/macos-instructions.md | 4 +- docs/getting-started/ubuntu-instructions.md | 4 +- docs/getting-started/windows-instructions.md | 4 +- docs/migration-guide.md | 2 +- docs/udf-guide.md | 4 +- eng/AfterSolutionBuild.targets | 8 ++-- .../Microsoft.Spark.CSharp.Examples.csproj | 6 +-- .../Sql/Batch/VectorUdfs.cs | 1 + .../Microsoft.Spark.FSharp.Examples.fsproj | 4 +- global.json | 2 +- src/csharp/Directory.Build.targets | 2 +- ...soft.Spark.Extensions.Delta.E2ETest.csproj | 2 +- ...ensions.DotNet.Interactive.UnitTest.csproj | 5 ++- ...Spark.Extensions.DotNet.Interactive.csproj | 2 +- ...Spark.Extensions.Hyperspace.E2ETest.csproj | 2 +- .../Microsoft.Spark.E2ETest.csproj | 3 +- .../Microsoft.Spark.UnitTest.csproj | 6 +-- .../CommandExecutorTests.cs | 6 ++- .../Microsoft.Spark.Worker.UnitTest.csproj | 5 ++- .../Command/RDDCommandExecutor.cs | 4 +- .../Command/SqlCommandExecutor.cs | 8 ++-- .../Microsoft.Spark.Worker.csproj | 7 ++-- .../Processor/BroadcastVariableProcessor.cs | 4 +- .../Microsoft.Spark/Microsoft.Spark.csproj | 14 ++++--- 33 files changed, 123 insertions(+), 100 deletions(-) create mode 100644 .vscode/settings.json diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 000000000..32cfc61d2 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,5 @@ +{ + "files.watcherExclude": { + "**/target": true + } +} \ No newline at end of file diff --git a/NuGet.config b/NuGet.config index b3e3b7454..1c47d39d6 100644 --- a/NuGet.config +++ b/NuGet.config @@ -5,6 +5,6 @@ - + diff --git a/README.md b/README.md index e23e85cd2..c76f2f74b 100644 --- a/README.md +++ b/README.md @@ -8,7 +8,7 @@ .NET for Apache Spark is compliant with .NET Standard - a formal specification of .NET APIs that are common across .NET implementations. This means you can use .NET for Apache Spark anywhere you write .NET code allowing you to reuse all the knowledge, skills, code, and libraries you already have as a .NET developer. -.NET for Apache Spark runs on Windows, Linux, and macOS using .NET 6, or Windows using .NET Framework. It also runs on all major cloud providers including [Azure HDInsight Spark](deployment/README.md#azure-hdinsight-spark), [Amazon EMR Spark](deployment/README.md#amazon-emr-spark), [AWS](deployment/README.md#databricks) & [Azure](deployment/README.md#databricks) Databricks. +.NET for Apache Spark runs on Windows, Linux, and macOS using .NET 8, or Windows using .NET Framework. It also runs on all major cloud providers including [Azure HDInsight Spark](deployment/README.md#azure-hdinsight-spark), [Amazon EMR Spark](deployment/README.md#amazon-emr-spark), [AWS](deployment/README.md#databricks) & [Azure](deployment/README.md#databricks) Databricks. **Note**: We currently have a Spark Project Improvement Proposal JIRA at [SPIP: .NET bindings for Apache Spark](https://issues.apache.org/jira/browse/SPARK-27006) to work with the community towards getting .NET support by default into Apache Spark. 
We highly encourage you to participate in the discussion. @@ -40,7 +40,7 @@ 2.4* - v2.1.1 + v2.1.1 3.0 @@ -50,6 +50,9 @@ 3.2 + + + 3.5 @@ -61,7 +64,7 @@ .NET for Apache Spark releases are available [here](https://github.com/dotnet/spark/releases) and NuGet packages are available [here](https://www.nuget.org/packages/Microsoft.Spark). ## Get Started -These instructions will show you how to run a .NET for Apache Spark app using .NET 6. +These instructions will show you how to run a .NET for Apache Spark app using .NET 8. - [Windows Instructions](docs/getting-started/windows-instructions.md) - [Ubuntu Instructions](docs/getting-started/ubuntu-instructions.md) - [MacOs Instructions](docs/getting-started/macos-instructions.md) @@ -79,8 +82,8 @@ Building from source is very easy and the whole process (from cloning to being a | | | Instructions | | :---: | :--- | :--- | -| ![Windows icon](docs/img/windows-icon-32.png) | **Windows** |
  • Local - [.NET Framework 4.6.1](docs/building/windows-instructions.md#using-visual-studio-for-net-framework-461)
  • Local - [.NET 6](docs/building/windows-instructions.md#using-net-core-cli-for-net-core)
    • | -| ![Ubuntu icon](docs/img/ubuntu-icon-32.png) | **Ubuntu** |
      • Local - [.NET 6](docs/building/ubuntu-instructions.md)
      • [Azure HDInsight Spark - .NET 6](deployment/README.md)
      | +| ![Windows icon](docs/img/windows-icon-32.png) | **Windows** |
      • Local - [.NET Framework 4.8](docs/building/windows-instructions.md#using-visual-studio-for-net-framework)
      • Local - [.NET 8](docs/building/windows-instructions.md#using-net-cli-for-net-8)
        • | +| ![Ubuntu icon](docs/img/ubuntu-icon-32.png) | **Ubuntu** |
          • Local - [.NET 8](docs/building/ubuntu-instructions.md)
          • [Azure HDInsight Spark - .NET 8](deployment/README.md)
          | ## Samples diff --git a/azure-pipelines-e2e-tests-template.yml b/azure-pipelines-e2e-tests-template.yml index 8fe5de8c5..cd56e48f7 100644 --- a/azure-pipelines-e2e-tests-template.yml +++ b/azure-pipelines-e2e-tests-template.yml @@ -65,10 +65,10 @@ stages: mvn -version - task: UseDotNet@2 - displayName: 'Use .NET 6 sdk' + displayName: 'Use .NET 8 sdk' inputs: packageType: sdk - version: 6.x + version: 8.x installationPath: $(Agent.ToolsDirectory)/dotnet - task: DownloadPipelineArtifact@2 @@ -78,7 +78,7 @@ stages: artifactName: Microsoft.Spark.Binaries - pwsh: | - $framework = "net6.0" + $framework = "net8.0" if ($env:AGENT_OS -eq 'Windows_NT') { $runtimeIdentifier = "win-x64" diff --git a/benchmark/README.md b/benchmark/README.md index fb2c99f12..89cb54e15 100644 --- a/benchmark/README.md +++ b/benchmark/README.md @@ -60,7 +60,7 @@ TPCH timing results is written to stdout in the following form: `TPCH_Result, ``` - **Note**: Ensure that you build the worker and application with .NET 6 in order to run hardware acceleration queries. + **Note**: Ensure that you build the worker and application with .NET 8 in order to run hardware acceleration queries. ## Python 1. Upload [run_python_benchmark.sh](run_python_benchmark.sh) and all [python tpch benchmark](python/) files to the cluster. diff --git a/benchmark/csharp/Tpch/Tpch.csproj b/benchmark/csharp/Tpch/Tpch.csproj index 53f324200..b9b54ffcc 100644 --- a/benchmark/csharp/Tpch/Tpch.csproj +++ b/benchmark/csharp/Tpch/Tpch.csproj @@ -2,8 +2,8 @@ Exe - net461;net6.0 - net6.0 + net48;net8.0 + net8.0 Tpch Tpch @@ -16,7 +16,7 @@ - + true diff --git a/deployment/README.md b/deployment/README.md index dc87f3ac0..a65651616 100644 --- a/deployment/README.md +++ b/deployment/README.md @@ -63,7 +63,7 @@ Microsoft.Spark.Worker is a backend component that lives on the individual worke ## Azure HDInsight Spark [Azure HDInsight Spark](https://docs.microsoft.com/en-us/azure/hdinsight/spark/apache-spark-overview) is the Microsoft implementation of Apache Spark in the cloud that allows users to launch and configure Spark clusters in Azure. You can use HDInsight Spark clusters to process your data stored in Azure (e.g., [Azure Storage](https://azure.microsoft.com/en-us/services/storage/) and [Azure Data Lake Storage](https://docs.microsoft.com/en-us/azure/storage/blobs/data-lake-storage-introduction)). -> **Note:** Azure HDInsight Spark is Linux-based. Therefore, if you are interested in deploying your app to Azure HDInsight Spark, make sure your app is .NET Standard compatible and that you use [.NET 6 compiler](https://dotnet.microsoft.com/download) to compile your app. +> **Note:** Azure HDInsight Spark is Linux-based. Therefore, if you are interested in deploying your app to Azure HDInsight Spark, make sure your app is .NET Standard compatible and that you use [.NET 8 compiler](https://dotnet.microsoft.com/download) to compile your app. ### Deploy Microsoft.Spark.Worker *Note that this step is required only once* @@ -115,7 +115,7 @@ EOF ## Amazon EMR Spark [Amazon EMR](https://docs.aws.amazon.com/emr/latest/ManagementGuide/emr-what-is-emr.html) is a managed cluster platform that simplifies running big data frameworks on AWS. -> **Note:** AWS EMR Spark is Linux-based. Therefore, if you are interested in deploying your app to AWS EMR Spark, make sure your app is .NET Standard compatible and that you use [.NET 6 compiler](https://dotnet.microsoft.com/download) to compile your app. +> **Note:** AWS EMR Spark is Linux-based. 
Therefore, if you are interested in deploying your app to AWS EMR Spark, make sure your app is .NET Standard compatible and that you use [.NET 8 compiler](https://dotnet.microsoft.com/download) to compile your app. ### Deploy Microsoft.Spark.Worker *Note that this step is only required at cluster creation* @@ -160,7 +160,7 @@ foo@bar:~$ aws emr add-steps \ ## Databricks [Databricks](http://databricks.com) is a platform that provides cloud-based big data processing using Apache Spark. -> **Note:** [Azure](https://azure.microsoft.com/en-us/services/databricks/) and [AWS](https://databricks.com/aws) Databricks is Linux-based. Therefore, if you are interested in deploying your app to Databricks, make sure your app is .NET Standard compatible and that you use [.NET 6 compiler](https://dotnet.microsoft.com/download) to compile your app. +> **Note:** [Azure](https://azure.microsoft.com/en-us/services/databricks/) and [AWS](https://databricks.com/aws) Databricks is Linux-based. Therefore, if you are interested in deploying your app to Databricks, make sure your app is .NET Standard compatible and that you use [.NET 8 compiler](https://dotnet.microsoft.com/download) to compile your app. Databricks allows you to submit Spark .NET apps to an existing active cluster or create a new cluster everytime you launch a job. This requires the **Microsoft.Spark.Worker** to be installed **first** before you submit a Spark .NET app. diff --git a/docs/building/ubuntu-instructions.md b/docs/building/ubuntu-instructions.md index dc72ad9ee..2a6235442 100644 --- a/docs/building/ubuntu-instructions.md +++ b/docs/building/ubuntu-instructions.md @@ -2,11 +2,13 @@ Building Spark .NET on Ubuntu 18.04 ========================== # Table of Contents -- [Open Issues](#open-issues) -- [Pre-requisites](#pre-requisites) +- [Building Spark .NET on Ubuntu 18.04](#building-spark-net-on-ubuntu-1804) +- [Table of Contents](#table-of-contents) +- [Open Issues:](#open-issues) +- [Pre-requisites:](#pre-requisites) - [Building](#building) - [Building Spark .NET Scala Extensions Layer](#building-spark-net-scala-extensions-layer) - - [Building .NET Sample Applications using .NET Core CLI](#building-net-sample-applications-using-net-core-cli) + - [Building .NET Sample Applications using .NET 8 CLI](#building-net-sample-applications-using-net-8-cli) - [Run Samples](#run-samples) # Open Issues: @@ -16,7 +18,7 @@ Building Spark .NET on Ubuntu 18.04 If you already have all the pre-requisites, skip to the [build](ubuntu-instructions.md#building) steps below. - 1. Download and install **[.NET 6 SDK](https://dotnet.microsoft.com/en-us/download/dotnet/6.0)** - installing the SDK will add the `dotnet` toolchain to your path. + 1. Download and install **[.NET 8 SDK](https://dotnet.microsoft.com/en-us/download/dotnet/8.0)** - installing the SDK will add the `dotnet` toolchain to your path. 2. Install **[OpenJDK 8](https://openjdk.java.net/install/)** - You can use the following command: ```bash @@ -110,33 +112,34 @@ Let us now build the Spark .NET Scala extension layer. 
This is easy to do: ``` cd src/scala -mvn clean package +mvn clean package ``` You should see JARs created for the supported Spark versions: * `microsoft-spark-2-3/target/microsoft-spark-2-3_2.11-.jar` * `microsoft-spark-2-4/target/microsoft-spark-2-4_2.11-.jar` * `microsoft-spark-3-0/target/microsoft-spark-3-0_2.12-.jar` +* `microsoft-spark-3-5/target/microsoft-spark-3-5_2.12-.jar` -## Building .NET Sample Applications using .NET 6 CLI +## Building .NET Sample Applications using .NET 8 CLI 1. Build the Worker ```bash cd ~/dotnet.spark/src/csharp/Microsoft.Spark.Worker/ - dotnet publish -f net6.0 -r linux-x64 + dotnet publish -f net8.0 -r linux-x64 ```
          📙 Click to see sample console output ```bash - user@machine:/home/user/dotnet.spark/src/csharp/Microsoft.Spark.Worker$ dotnet publish -f net6.0 -r linux-x64 + user@machine:/home/user/dotnet.spark/src/csharp/Microsoft.Spark.Worker$ dotnet publish -f net8.0 -r linux-x64 Microsoft (R) Build Engine version 16.0.462+g62fb89029d for .NET Core Copyright (C) Microsoft Corporation. All rights reserved. Restore completed in 36.03 ms for /home/user/dotnet.spark/src/csharp/Microsoft.Spark.Worker/Microsoft.Spark.Worker.csproj. Restore completed in 35.94 ms for /home/user/dotnet.spark/src/csharp/Microsoft.Spark/Microsoft.Spark.csproj. Microsoft.Spark -> /home/user/dotnet.spark/artifacts/bin/Microsoft.Spark/Debug/netstandard2.0/Microsoft.Spark.dll - Microsoft.Spark.Worker -> /home/user/dotnet.spark/artifacts/bin/Microsoft.Spark.Worker/Debug/net6.0/linux-x64/Microsoft.Spark.Worker.dll - Microsoft.Spark.Worker -> /home/user/dotnet.spark/artifacts/bin/Microsoft.Spark.Worker/Debug/net6.0/linux-x64/publish/ + Microsoft.Spark.Worker -> /home/user/dotnet.spark/artifacts/bin/Microsoft.Spark.Worker/Debug/net8.0/linux-x64/Microsoft.Spark.Worker.dll + Microsoft.Spark.Worker -> /home/user/dotnet.spark/artifacts/bin/Microsoft.Spark.Worker/Debug/net8.0/linux-x64/publish/ ```
          @@ -144,31 +147,31 @@ You should see JARs created for the supported Spark versions: 2. Build the Samples ```bash cd ~/dotnet.spark/examples/Microsoft.Spark.CSharp.Examples/ - dotnet publish -f net6.0 -r linux-x64 + dotnet publish -f net8.0 -r linux-x64 ```
          📙 Click to see sample console output ```bash - user@machine:/home/user/dotnet.spark/examples/Microsoft.Spark.CSharp.Examples$ dotnet publish -f net6.0 -r linux-x64 + user@machine:/home/user/dotnet.spark/examples/Microsoft.Spark.CSharp.Examples$ dotnet publish -f net8.0 -r linux-x64 Microsoft (R) Build Engine version 16.0.462+g62fb89029d for .NET Core Copyright (C) Microsoft Corporation. All rights reserved. Restore completed in 37.11 ms for /home/user/dotnet.spark/src/csharp/Microsoft.Spark/Microsoft.Spark.csproj. Restore completed in 281.63 ms for /home/user/dotnet.spark/examples/Microsoft.Spark.CSharp.Examples/Microsoft.Spark.CSharp.Examples.csproj. Microsoft.Spark -> /home/user/dotnet.spark/artifacts/bin/Microsoft.Spark/Debug/netstandard2.0/Microsoft.Spark.dll - Microsoft.Spark.CSharp.Examples -> /home/user/dotnet.spark/artifacts/bin/Microsoft.Spark.CSharp.Examples/Debug/net6.0/linux-x64/Microsoft.Spark.CSharp.Examples.dll - Microsoft.Spark.CSharp.Examples -> /home/user/dotnet.spark/artifacts/bin/Microsoft.Spark.CSharp.Examples/Debug/net6.0/linux-x64/publish/ + Microsoft.Spark.CSharp.Examples -> /home/user/dotnet.spark/artifacts/bin/Microsoft.Spark.CSharp.Examples/Debug/net8.0/linux-x64/Microsoft.Spark.CSharp.Examples.dll + Microsoft.Spark.CSharp.Examples -> /home/user/dotnet.spark/artifacts/bin/Microsoft.Spark.CSharp.Examples/Debug/net8.0/linux-x64/publish/ ```
          # Run Samples -Once you build the samples, you can use `spark-submit` to submit your .NET 6 apps. Make sure you have followed the [pre-requisites](#pre-requisites) section and installed Apache Spark. +Once you build the samples, you can use `spark-submit` to submit your .NET 8 apps. Make sure you have followed the [pre-requisites](#pre-requisites) section and installed Apache Spark. - 1. Set the `DOTNET_WORKER_DIR` or `PATH` environment variable to include the path where the `Microsoft.Spark.Worker` binary has been generated (e.g., `~/dotnet.spark/artifacts/bin/Microsoft.Spark.Worker/Debug/net6.0/linux-x64/publish`) - 2. Open a terminal and go to the directory where your app binary has been generated (e.g., `~/dotnet.spark/artifacts/bin/Microsoft.Spark.CSharp.Examples/Debug/net6.0/linux-x64/publish`) + 1. Set the `DOTNET_WORKER_DIR` or `PATH` environment variable to include the path where the `Microsoft.Spark.Worker` binary has been generated (e.g., `~/dotnet.spark/artifacts/bin/Microsoft.Spark.Worker/Debug/net8.0/linux-x64/publish`) + 2. Open a terminal and go to the directory where your app binary has been generated (e.g., `~/dotnet.spark/artifacts/bin/Microsoft.Spark.CSharp.Examples/Debug/net8.0/linux-x64/publish`) 3. Running your app follows the basic structure: ```bash spark-submit \ diff --git a/docs/building/windows-instructions.md b/docs/building/windows-instructions.md index 4b55a92e0..8feb82a92 100644 --- a/docs/building/windows-instructions.md +++ b/docs/building/windows-instructions.md @@ -8,7 +8,7 @@ Building Spark .NET on Windows - [Building Spark .NET Scala Extensions Layer](#building-spark-net-scala-extensions-layer) - [Building .NET Samples Application](#building-net-samples-application) - [Using Visual Studio for .NET Framework](#using-visual-studio-for-net-framework) - - [Using .NET CLI for .NET 6](#using-net-cli-for-net-6) + - [Using .NET CLI for .NET 8](#using-net-cli-for-net-8) - [Run Samples](#run-samples) # Open Issues: @@ -20,12 +20,12 @@ Building Spark .NET on Windows If you already have all the pre-requisites, skip to the [build](windows-instructions.md#building) steps below. - 1. Download and install the **[.NET 6 SDK](https://dotnet.microsoft.com/en-us/download/dotnet/6.0)** - installing the SDK will add the `dotnet` toolchain to your path. + 1. Download and install the **[.NET 8 SDK](https://dotnet.microsoft.com/en-us/download/dotnet/8.0)** - installing the SDK will add the `dotnet` toolchain to your path. 2. Install **[Visual Studio 2019](https://www.visualstudio.com/downloads/)** (Version 16.4 or later). The Community version is completely free. When configuring your installation, include these components at minimum: * .NET desktop development * All Required Components - * .NET Framework 4.6.1 Development Tools - * .NET 6 cross-platform development + * .NET Framework 4.8 Development Tools + * .NET 8 cross-platform development * All Required Components 3. Install **[Java 1.8](https://www.oracle.com/technetwork/java/javase/downloads/jdk8-downloads-2133151.html)** - Select the appropriate version for your operating system e.g., jdk-8u201-windows-x64.exe for Win x64 machine. @@ -90,7 +90,7 @@ git clone https://github.com/dotnet/spark.git c:\github\dotnet-spark When you submit a .NET application, Spark .NET has the necessary logic written in Scala that inform Apache Spark how to handle your requests (e.g., request to create a new Spark Session, request to transfer data from .NET side to JVM side etc.). 
This logic can be found in the [Spark .NET Scala Source Code](../../src/scala). -Regardless of whether you are using .NET Framework or .NET 6, you will need to build the Spark .NET Scala extension layer. This is easy to do: +Regardless of whether you are using .NET Framework or .NET 8, you will need to build the Spark .NET Scala extension layer. This is easy to do: ```powershell cd src\scala @@ -100,6 +100,7 @@ You should see JARs created for the supported Spark versions: * `microsoft-spark-2-3\target\microsoft-spark-2-3_2.11-.jar` * `microsoft-spark-2-4\target\microsoft-spark-2-4_2.11-.jar` * `microsoft-spark-3-0\target\microsoft-spark-3-0_2.12-.jar` +* `microsoft-spark-3-5\target\microsoft-spark-3-5_2.12-.jar` ## Building .NET Samples Application @@ -148,59 +149,59 @@ You should see JARs created for the supported Spark versions: -### Using .NET CLI for .NET 6 +### Using .NET CLI for .NET 8 -> Note: We are currently working on automating .NET 6 builds for Spark .NET. Until then, we appreciate your patience in performing some of the steps manually. +> Note: We are currently working on automating .NET 8 builds for Spark .NET. Until then, we appreciate your patience in performing some of the steps manually. 1. Build the Worker ```powershell cd C:\github\dotnet-spark\src\csharp\Microsoft.Spark.Worker\ - dotnet publish -f net6.0 -r win-x64 + dotnet publish -f net8.0 -r win-x64 ```
          📙 Click to see sample console output ```powershell - PS C:\github\dotnet-spark\src\csharp\Microsoft.Spark.Worker> dotnet publish -f net6.0 -r win-x64 + PS C:\github\dotnet-spark\src\csharp\Microsoft.Spark.Worker> dotnet publish -f net8.0 -r win-x64 Microsoft (R) Build Engine version 16.0.462+g62fb89029d for .NET Core Copyright (C) Microsoft Corporation. All rights reserved. Restore completed in 299.95 ms for C:\github\dotnet-spark\src\csharp\Microsoft.Spark\Microsoft.Spark.csproj. Restore completed in 306.62 ms for C:\github\dotnet-spark\src\csharp\Microsoft.Spark.Worker\Microsoft.Spark.Worker.csproj. Microsoft.Spark -> C:\github\dotnet-spark\artifacts\bin\Microsoft.Spark\Debug\netstandard2.0\Microsoft.Spark.dll - Microsoft.Spark.Worker -> C:\github\dotnet-spark\artifacts\bin\Microsoft.Spark.Worker\Debug\net6.0\win-x64\Microsoft.Spark.Worker.dll - Microsoft.Spark.Worker -> C:\github\dotnet-spark\artifacts\bin\Microsoft.Spark.Worker\Debug\net6.0\win-x64\publish\ + Microsoft.Spark.Worker -> C:\github\dotnet-spark\artifacts\bin\Microsoft.Spark.Worker\Debug\net8.0\win-x64\Microsoft.Spark.Worker.dll + Microsoft.Spark.Worker -> C:\github\dotnet-spark\artifacts\bin\Microsoft.Spark.Worker\Debug\net8.0\win-x64\publish\ ```
          2. Build the Samples ```powershell cd C:\github\dotnet-spark\examples\Microsoft.Spark.CSharp.Examples\ - dotnet publish -f net6.0 -r win-x64 + dotnet publish -f net8.0 -r win-x64 ```
          📙 Click to see sample console output ```powershell - PS C:\github\dotnet-spark\examples\Microsoft.Spark.CSharp.Examples> dotnet publish -f net6.0 -r win10-x64 + PS C:\github\dotnet-spark\examples\Microsoft.Spark.CSharp.Examples> dotnet publish -f net8.0 -r win-x64 Microsoft (R) Build Engine version 16.0.462+g62fb89029d for .NET Core Copyright (C) Microsoft Corporation. All rights reserved. Restore completed in 44.22 ms for C:\github\dotnet-spark\src\csharp\Microsoft.Spark\Microsoft.Spark.csproj. Restore completed in 336.94 ms for C:\github\dotnet-spark\examples\Microsoft.Spark.CSharp.Examples\Microsoft.Spark.CSharp.Examples.csproj. Microsoft.Spark -> C:\github\dotnet-spark\artifacts\bin\Microsoft.Spark\Debug\netstandard2.0\Microsoft.Spark.dll - Microsoft.Spark.CSharp.Examples -> C:\github\dotnet-spark\artifacts\bin\Microsoft.Spark.CSharp.Examples\Debug\net6.0\win-x64\Microsoft.Spark.CSharp.Examples.dll - Microsoft.Spark.CSharp.Examples -> C:\github\dotnet-spark\artifacts\bin\Microsoft.Spark.CSharp.Examples\Debug\net6.0\win-x64\publish\ + Microsoft.Spark.CSharp.Examples -> C:\github\dotnet-spark\artifacts\bin\Microsoft.Spark.CSharp.Examples\Debug\net8.0\win-x64\Microsoft.Spark.CSharp.Examples.dll + Microsoft.Spark.CSharp.Examples -> C:\github\dotnet-spark\artifacts\bin\Microsoft.Spark.CSharp.Examples\Debug\net8.0\win-x64\publish\ ```
          # Run Samples -Once you build the samples, running them will be through `spark-submit` regardless of whether you are targeting .NET Framework or .NET 6 apps. Make sure you have followed the [pre-requisites](#pre-requisites) section and installed Apache Spark. +Once you build the samples, running them will be through `spark-submit` regardless of whether you are targeting .NET Framework or .NET 8 apps. Make sure you have followed the [pre-requisites](#pre-requisites) section and installed Apache Spark. - 1. Set the `DOTNET_WORKER_DIR` or `PATH` environment variable to include the path where the `Microsoft.Spark.Worker` binary has been generated (e.g., `c:\github\dotnet\spark\artifacts\bin\Microsoft.Spark.Worker\Debug\net461` for .NET Framework, `c:\github\dotnet-spark\artifacts\bin\Microsoft.Spark.Worker\Debug\net6.0\win-x64\publish` for .NET 6) - 2. Open Powershell and go to the directory where your app binary has been generated (e.g., `c:\github\dotnet\spark\artifacts\bin\Microsoft.Spark.CSharp.Examples\Debug\net461` for .NET Framework, `c:\github\dotnet-spark\artifacts\bin\Microsoft.Spark.CSharp.Examples\Debug\net6.0\win1-x64\publish` for .NET 6) + 1. Set the `DOTNET_WORKER_DIR` or `PATH` environment variable to include the path where the `Microsoft.Spark.Worker` binary has been generated (e.g., `c:\github\dotnet\spark\artifacts\bin\Microsoft.Spark.Worker\Debug\net48` for .NET Framework, `c:\github\dotnet-spark\artifacts\bin\Microsoft.Spark.Worker\Debug\net8.0\win-x64\publish` for .NET 8) + 2. Open Powershell and go to the directory where your app binary has been generated (e.g., `c:\github\dotnet\spark\artifacts\bin\Microsoft.Spark.CSharp.Examples\Debug\net48` for .NET Framework, `c:\github\dotnet-spark\artifacts\bin\Microsoft.Spark.CSharp.Examples\Debug\net8.0\win-x64\publish` for .NET 8) 3. Running your app follows the basic structure: ```powershell spark-submit.cmd ` diff --git a/docs/getting-started/macos-instructions.md b/docs/getting-started/macos-instructions.md index 3a7c55d5e..c1f89cfe4 100644 --- a/docs/getting-started/macos-instructions.md +++ b/docs/getting-started/macos-instructions.md @@ -1,10 +1,10 @@ # Getting Started with Spark .NET on MacOS -These instructions will show you how to run a .NET for Apache Spark app using .NET 6 on MacOSX. +These instructions will show you how to run a .NET for Apache Spark app using .NET 8 on macOS. ## Pre-requisites -- Download and install **[.NET 6 SDK](https://dotnet.microsoft.com/en-us/download/dotnet/6.0)** +- Download and install **[.NET 8 SDK](https://dotnet.microsoft.com/en-us/download/dotnet/8.0)** - Install **[Java 8](https://www.oracle.com/technetwork/java/javase/downloads/jdk8-downloads-2133151.html)** - Select the appropriate version for your operating system e.g., `jdk-8u231-macosx-x64.dmg`. - Install using the installer and verify you are able to run `java` from your command-line diff --git a/docs/getting-started/ubuntu-instructions.md b/docs/getting-started/ubuntu-instructions.md index 36c048177..d2672bac0 100644 --- a/docs/getting-started/ubuntu-instructions.md +++ b/docs/getting-started/ubuntu-instructions.md @@ -1,10 +1,10 @@ # Getting Started with Spark.NET on Ubuntu -These instructions will show you how to run a .NET for Apache Spark app using .NET 6 on Ubuntu 18.04. +These instructions will show you how to run a .NET for Apache Spark app using .NET 8 on Ubuntu 18.04.
## Pre-requisites -- Download and install the following: **[.NET 6 SDK](https://dotnet.microsoft.com/en-us/download/dotnet/6.0)** | **[OpenJDK 8](https://openjdk.java.net/install/)** | **[Apache Spark 2.4.1](https://archive.apache.org/dist/spark/spark-2.4.1/spark-2.4.1-bin-hadoop2.7.tgz)** +- Download and install the following: **[.NET 8 SDK](https://dotnet.microsoft.com/en-us/download/dotnet/8.0)** | **[OpenJDK 8](https://openjdk.java.net/install/)** | **[Apache Spark 2.4.1](https://archive.apache.org/dist/spark/spark-2.4.1/spark-2.4.1-bin-hadoop2.7.tgz)** - Download and install **[Microsoft.Spark.Worker](https://github.com/dotnet/spark/releases)** release: - Select a **[Microsoft.Spark.Worker](https://github.com/dotnet/spark/releases)** release from .NET for Apache Spark GitHub Releases page and download into your local machine (e.g., `~/bin/Microsoft.Spark.Worker`). - **IMPORTANT** Create a [new environment variable](https://help.ubuntu.com/community/EnvironmentVariables) `DOTNET_WORKER_DIR` and set it to the directory where you downloaded and extracted the Microsoft.Spark.Worker (e.g., `~/bin/Microsoft.Spark.Worker`). diff --git a/docs/getting-started/windows-instructions.md b/docs/getting-started/windows-instructions.md index b5e2c8713..7ecb5757f 100644 --- a/docs/getting-started/windows-instructions.md +++ b/docs/getting-started/windows-instructions.md @@ -1,10 +1,10 @@ # Getting Started with Spark .NET on Windows -These instructions will show you how to run a .NET for Apache Spark app using .NET 6 on Windows. +These instructions will show you how to run a .NET for Apache Spark app using .NET 8 on Windows. ## Pre-requisites -- Download and install the following: **[.NET 6 SDK](https://dotnet.microsoft.com/en-us/download/dotnet/6.0)** | **[Visual Studio 2019](https://www.visualstudio.com/downloads/)** | **[Java 1.8](https://www.oracle.com/technetwork/java/javase/downloads/jdk8-downloads-2133151.html)** | **[Apache Spark 2.4.1](https://archive.apache.org/dist/spark/spark-2.4.1/spark-2.4.1-bin-hadoop2.7.tgz)** +- Download and install the following: **[.NET 8 SDK](https://dotnet.microsoft.com/en-us/download/dotnet/8.0)** | **[Visual Studio 2019](https://www.visualstudio.com/downloads/)** | **[Java 1.8](https://www.oracle.com/technetwork/java/javase/downloads/jdk8-downloads-2133151.html)** | **[Apache Spark 2.4.1](https://archive.apache.org/dist/spark/spark-2.4.1/spark-2.4.1-bin-hadoop2.7.tgz)** - Download and install **[Microsoft.Spark.Worker](https://github.com/dotnet/spark/releases)** release: - Select a **[Microsoft.Spark.Worker](https://github.com/dotnet/spark/releases)** release from .NET for Apache Spark GitHub Releases page and download into your local machine (e.g., `c:\bin\Microsoft.Spark.Worker\`). - **IMPORTANT** Create a [new environment variable](https://www.java.com/en/download/help/path.xml) `DOTNET_WORKER_DIR` and set it to the directory where you downloaded and extracted the Microsoft.Spark.Worker (e.g., `c:\bin\Microsoft.Spark.Worker`). diff --git a/docs/migration-guide.md b/docs/migration-guide.md index daf13571e..ea7b48078 100644 --- a/docs/migration-guide.md +++ b/docs/migration-guide.md @@ -2,7 +2,7 @@ - [Upgrading from Microsoft.Spark 0.x to 1.0](#upgrading-from-microsoftspark-0x-to-10) ## Upgrading from Microsoft.Spark 0.x to 1.0 -- Limited support for [.NET Framework](https://dotnet.microsoft.com/learn/dotnet/what-is-dotnet-framework). Please migrate to **[.NET 6 SDK](https://dotnet.microsoft.com/en-us/download/dotnet/6.0)** instead. 
+- Limited support for [.NET Framework](https://dotnet.microsoft.com/learn/dotnet/what-is-dotnet-framework). Please migrate to **[.NET 8 SDK](https://dotnet.microsoft.com/en-us/download/dotnet/8.0)** instead. - `Microsoft.Spark.Sql.Streaming.DataStreamWriter.Foreach` does not work in .NET Framework ([#576](https://github.com/dotnet/spark/issues/576)) - `Microsoft.Spark.Worker` should be upgraded to 1.0 as `Microsoft.Spark.Worker` 0.x is not forward-compatible with `Microsoft.Spark` 1.0. - `Microsoft.Spark` should be upgraded to 1.0 as `Microsoft.Spark.Worker` 1.0 is not backward-compatible with `Microsoft.Spark` 0.x. diff --git a/docs/udf-guide.md b/docs/udf-guide.md index 5eb0a24fc..236247fd1 100644 --- a/docs/udf-guide.md +++ b/docs/udf-guide.md @@ -53,7 +53,7 @@ Since UDFs are functions that need to be executed on the workers, they have to b ## Good to know while implementing UDFs -One behavior to be aware of while implementing UDFs in .NET for Apache Spark is how the target of the UDF gets serialized. .NET for Apache Spark uses .NET 6, which does not support serializing delegates, so it is instead done by using reflection to serialize the target where the delegate is defined. When multiple delegates are defined in a common scope, they have a shared closure that becomes the target of reflection for serialization. Let's take an example to illustrate what that means. +One behavior to be aware of while implementing UDFs in .NET for Apache Spark is how the target of the UDF gets serialized. .NET for Apache Spark uses .NET 8, which does not support serializing delegates, so it is instead done by using reflection to serialize the target where the delegate is defined. When multiple delegates are defined in a common scope, they have a shared closure that becomes the target of reflection for serialization. Let's take an example to illustrate what that means. The following code snippet defines two string variables that are being referenced in two function delegates that return the respective strings as result: @@ -168,4 +168,4 @@ public class C Here we see that `func` and `func2` no longer share a closure and have their own separate closures `<>c__DisplayClass0_0` and `<>c__DisplayClass0_1` respectively. When used as the target for serialization, nothing other than the referenced variables will get serialized for the delegate. This behavior is important to keep in mind while implementing multiple UDFs in a common scope. -To learn more about UDFs in general, please review the following articles that explain UDFs and how to use them: [UDFs in databricks(scala)](https://docs.databricks.com/spark/latest/spark-sql/udf-scala.html), [Spark UDFs and some gotchas](https://medium.com/@achilleus/spark-udfs-we-can-use-them-but-should-we-use-them-2c5a561fde6d). \ No newline at end of file +To learn more about UDFs in general, please review the following articles that explain UDFs and how to use them: [UDFs in databricks(scala)](https://docs.databricks.com/spark/latest/spark-sql/udf-scala.html), [Spark UDFs and some gotchas](https://medium.com/@achilleus/spark-udfs-we-can-use-them-but-should-we-use-them-2c5a561fde6d). 
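The udf-guide.md hunk above describes how delegates declared in a common scope share a single compiler-generated closure, so serializing one UDF target can drag along state captured by another. A minimal standalone sketch of that behavior and the usual workaround — all names here are illustrative and not part of this patch:

```csharp
using System;

public static class ClosureSketch
{
    // A delegate created through a factory gets its own compiler-generated
    // closure that captures only 'suffix'.
    private static Func<string, string> MakeUdf(string suffix) =>
        str => str + " " + suffix;

    public static void Main()
    {
        string s1 = "hello";
        string s2 = "world";

        // Declared side by side in one scope: the compiler emits a single
        // shared closure class (the <>c__DisplayClass0_0 case shown in the
        // guide) holding both s1 and s2, so serializing func1's target would
        // also pull in s2.
        Func<string, string> func1 = str => str + " " + s1;
        Func<string, string> func2 = str => str + " " + s2;

        // Factory-created delegates keep separate closures, so only the
        // referenced variable travels with each UDF.
        Func<string, string> isolated1 = MakeUdf(s1);
        Func<string, string> isolated2 = MakeUdf(s2);

        Console.WriteLine(func1("a"));     // a hello
        Console.WriteLine(func2("b"));     // b world
        Console.WriteLine(isolated1("c")); // c hello
        Console.WriteLine(isolated2("d")); // d world
    }
}
```

Isolating each delegate's captured state in its own scope, as the factory does here, matches the guide's second decompilation: one display class per delegate, so nothing beyond the referenced variable is serialized with the UDF.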
diff --git a/eng/AfterSolutionBuild.targets b/eng/AfterSolutionBuild.targets index 961a75000..9627f97a0 100644 --- a/eng/AfterSolutionBuild.targets +++ b/eng/AfterSolutionBuild.targets @@ -5,10 +5,10 @@ - <_PublishTarget Include="FullFramework" Framework="net461" RuntimeId="win-x64" /> - <_PublishTarget Include="WindowsCore" Framework="net6.0" RuntimeId="win-x64" /> - <_PublishTarget Include="LinuxCore" Framework="net6.0" RuntimeId="linux-x64" /> - <_PublishTarget Include="MacOSCore" Framework="net6.0" RuntimeId="osx-x64" /> + <_PublishTarget Include="FullFramework" Framework="net48" RuntimeId="win-x64" /> + <_PublishTarget Include="WindowsCore" Framework="net8.0" RuntimeId="win-x64" /> + <_PublishTarget Include="LinuxCore" Framework="net8.0" RuntimeId="linux-x64" /> + <_PublishTarget Include="MacOSCore" Framework="net8.0" RuntimeId="osx-x64" /> Exe - net461;net6.0 - net6.0 + net48;net8.0 + net8.0 Microsoft.Spark.Examples Microsoft.Spark.CSharp.Examples @@ -13,7 +13,7 @@ - + diff --git a/examples/Microsoft.Spark.CSharp.Examples/Sql/Batch/VectorUdfs.cs b/examples/Microsoft.Spark.CSharp.Examples/Sql/Batch/VectorUdfs.cs index 2497d5ef3..85ab974c1 100644 --- a/examples/Microsoft.Spark.CSharp.Examples/Sql/Batch/VectorUdfs.cs +++ b/examples/Microsoft.Spark.CSharp.Examples/Sql/Batch/VectorUdfs.cs @@ -7,6 +7,7 @@ using Apache.Arrow.Types; using Microsoft.Spark.Sql; using Microsoft.Spark.Sql.Types; +using IntegerType = Microsoft.Spark.Sql.Types.IntegerType; using StructType = Microsoft.Spark.Sql.Types.StructType; namespace Microsoft.Spark.Examples.Sql.Batch diff --git a/examples/Microsoft.Spark.FSharp.Examples/Microsoft.Spark.FSharp.Examples.fsproj b/examples/Microsoft.Spark.FSharp.Examples/Microsoft.Spark.FSharp.Examples.fsproj index d1d577681..40ffebd11 100644 --- a/examples/Microsoft.Spark.FSharp.Examples/Microsoft.Spark.FSharp.Examples.fsproj +++ b/examples/Microsoft.Spark.FSharp.Examples/Microsoft.Spark.FSharp.Examples.fsproj @@ -2,8 +2,8 @@ Exe - net461;net6.0 - net6.0 + net48;net8.0 + net8.0 Microsoft.Spark.Examples Microsoft.Spark.FSharp.Examples false diff --git a/global.json b/global.json index 8f9d74633..6a00584c6 100644 --- a/global.json +++ b/global.json @@ -1,6 +1,6 @@ { "tools": { - "dotnet": "6.0.400" + "dotnet": "8.0.404" }, "msbuild-sdks": { "Microsoft.DotNet.Arcade.Sdk": "1.0.0-beta.20230.5" diff --git a/src/csharp/Directory.Build.targets b/src/csharp/Directory.Build.targets index 45f505301..c00601eaa 100644 --- a/src/csharp/Directory.Build.targets +++ b/src/csharp/Directory.Build.targets @@ -3,7 +3,7 @@ - + all runtime; build; native; contentfiles; analyzers diff --git a/src/csharp/Extensions/Microsoft.Spark.Extensions.Delta.E2ETest/Microsoft.Spark.Extensions.Delta.E2ETest.csproj b/src/csharp/Extensions/Microsoft.Spark.Extensions.Delta.E2ETest/Microsoft.Spark.Extensions.Delta.E2ETest.csproj index 393813db4..6fee61e25 100644 --- a/src/csharp/Extensions/Microsoft.Spark.Extensions.Delta.E2ETest/Microsoft.Spark.Extensions.Delta.E2ETest.csproj +++ b/src/csharp/Extensions/Microsoft.Spark.Extensions.Delta.E2ETest/Microsoft.Spark.Extensions.Delta.E2ETest.csproj @@ -1,6 +1,6 @@  - net6.0 + net8.0 diff --git a/src/csharp/Extensions/Microsoft.Spark.Extensions.DotNet.Interactive.UnitTest/Microsoft.Spark.Extensions.DotNet.Interactive.UnitTest.csproj b/src/csharp/Extensions/Microsoft.Spark.Extensions.DotNet.Interactive.UnitTest/Microsoft.Spark.Extensions.DotNet.Interactive.UnitTest.csproj index bd592374e..5128070fb 100644 --- 
a/src/csharp/Extensions/Microsoft.Spark.Extensions.DotNet.Interactive.UnitTest/Microsoft.Spark.Extensions.DotNet.Interactive.UnitTest.csproj +++ b/src/csharp/Extensions/Microsoft.Spark.Extensions.DotNet.Interactive.UnitTest/Microsoft.Spark.Extensions.DotNet.Interactive.UnitTest.csproj @@ -1,12 +1,13 @@  - net6.0 + net8.0 Microsoft.Spark.Extensions.DotNet.Interactive.UnitTest + true - + diff --git a/src/csharp/Extensions/Microsoft.Spark.Extensions.DotNet.Interactive/Microsoft.Spark.Extensions.DotNet.Interactive.csproj b/src/csharp/Extensions/Microsoft.Spark.Extensions.DotNet.Interactive/Microsoft.Spark.Extensions.DotNet.Interactive.csproj index c6aba59c4..04b7504b3 100644 --- a/src/csharp/Extensions/Microsoft.Spark.Extensions.DotNet.Interactive/Microsoft.Spark.Extensions.DotNet.Interactive.csproj +++ b/src/csharp/Extensions/Microsoft.Spark.Extensions.DotNet.Interactive/Microsoft.Spark.Extensions.DotNet.Interactive.csproj @@ -2,7 +2,7 @@ Library - net6.0 + net8.0 Microsoft.Spark.Extensions.DotNet.Interactive true true diff --git a/src/csharp/Extensions/Microsoft.Spark.Extensions.Hyperspace.E2ETest/Microsoft.Spark.Extensions.Hyperspace.E2ETest.csproj b/src/csharp/Extensions/Microsoft.Spark.Extensions.Hyperspace.E2ETest/Microsoft.Spark.Extensions.Hyperspace.E2ETest.csproj index 7de956704..66837c232 100644 --- a/src/csharp/Extensions/Microsoft.Spark.Extensions.Hyperspace.E2ETest/Microsoft.Spark.Extensions.Hyperspace.E2ETest.csproj +++ b/src/csharp/Extensions/Microsoft.Spark.Extensions.Hyperspace.E2ETest/Microsoft.Spark.Extensions.Hyperspace.E2ETest.csproj @@ -1,6 +1,6 @@  - net6.0 + net8.0 diff --git a/src/csharp/Microsoft.Spark.E2ETest/Microsoft.Spark.E2ETest.csproj b/src/csharp/Microsoft.Spark.E2ETest/Microsoft.Spark.E2ETest.csproj index b9e2bedd8..e242da0d0 100644 --- a/src/csharp/Microsoft.Spark.E2ETest/Microsoft.Spark.E2ETest.csproj +++ b/src/csharp/Microsoft.Spark.E2ETest/Microsoft.Spark.E2ETest.csproj @@ -1,6 +1,7 @@  - net6.0 + net8.0 + true diff --git a/src/csharp/Microsoft.Spark.UnitTest/Microsoft.Spark.UnitTest.csproj b/src/csharp/Microsoft.Spark.UnitTest/Microsoft.Spark.UnitTest.csproj index d863334da..e635434ae 100644 --- a/src/csharp/Microsoft.Spark.UnitTest/Microsoft.Spark.UnitTest.csproj +++ b/src/csharp/Microsoft.Spark.UnitTest/Microsoft.Spark.UnitTest.csproj @@ -1,13 +1,13 @@  - net6.0 + net8.0 Microsoft.Spark.UnitTest + true - - + diff --git a/src/csharp/Microsoft.Spark.Worker.UnitTest/CommandExecutorTests.cs b/src/csharp/Microsoft.Spark.Worker.UnitTest/CommandExecutorTests.cs index 7e8887975..cc43b9f37 100644 --- a/src/csharp/Microsoft.Spark.Worker.UnitTest/CommandExecutorTests.cs +++ b/src/csharp/Microsoft.Spark.Worker.UnitTest/CommandExecutorTests.cs @@ -674,7 +674,7 @@ public void TestArrowSqlCommandExecutorWithEmptyInput( var arrowReader = new ArrowStreamReader(outputStream); RecordBatch outputBatch = arrowReader.ReadNextRecordBatch(); - Assert.Equal(1, outputBatch.Schema.Fields.Count); + Assert.Equal(1, outputBatch.Schema.FieldsList.Count); Assert.IsType(outputBatch.Schema.GetFieldByIndex(0).DataType); Assert.Equal(0, outputBatch.Length); @@ -759,7 +759,7 @@ public void TestDataFrameSqlCommandExecutorWithEmptyInput( var arrowReader = new ArrowStreamReader(outputStream); RecordBatch outputBatch = arrowReader.ReadNextRecordBatch(); - Assert.Equal(1, outputBatch.Schema.Fields.Count); + Assert.Equal(1, outputBatch.Schema.FieldsList.Count); Assert.IsType(outputBatch.Schema.GetFieldByIndex(0).DataType); Assert.Equal(0, outputBatch.Length); @@ -1049,8 +1049,10 @@ public void 
TestRDDCommandExecutor(Version sparkVersion, IpcOptions ipcOptions) using var inputStream = new MemoryStream(); using var outputStream = new MemoryStream(); +#pragma warning disable SYSLIB0011 // Type or member is obsolete // Write test data to the input stream. var formatter = new BinaryFormatter(); +#pragma warning restore SYSLIB0011 // Type or member is obsolete var memoryStream = new MemoryStream(); var inputs = new int[] { 0, 1, 2, 3, 4 }; diff --git a/src/csharp/Microsoft.Spark.Worker.UnitTest/Microsoft.Spark.Worker.UnitTest.csproj b/src/csharp/Microsoft.Spark.Worker.UnitTest/Microsoft.Spark.Worker.UnitTest.csproj index 9e06e5ca8..431f3765b 100644 --- a/src/csharp/Microsoft.Spark.Worker.UnitTest/Microsoft.Spark.Worker.UnitTest.csproj +++ b/src/csharp/Microsoft.Spark.Worker.UnitTest/Microsoft.Spark.Worker.UnitTest.csproj @@ -1,9 +1,10 @@  - net6.0 + net8.0 + true - + diff --git a/src/csharp/Microsoft.Spark.Worker/Command/RDDCommandExecutor.cs b/src/csharp/Microsoft.Spark.Worker/Command/RDDCommandExecutor.cs index 830903ea9..7ad65bc40 100644 --- a/src/csharp/Microsoft.Spark.Worker/Command/RDDCommandExecutor.cs +++ b/src/csharp/Microsoft.Spark.Worker/Command/RDDCommandExecutor.cs @@ -20,7 +20,9 @@ internal class RDDCommandExecutor [ThreadStatic] private static MemoryStream s_writeOutputStream; [ThreadStatic] +#pragma warning disable SYSLIB0011 // Type or member is obsolete private static BinaryFormatter s_binaryFormatter; +#pragma warning restore SYSLIB0011 // Type or member is obsolete /// /// Executes the commands on the input data read from input stream @@ -111,8 +113,8 @@ private void Serialize( switch (serializerMode) { case CommandSerDe.SerializedMode.Byte: - BinaryFormatter formatter = s_binaryFormatter ??= new BinaryFormatter(); #pragma warning disable SYSLIB0011 // Type or member is obsolete + BinaryFormatter formatter = s_binaryFormatter ??= new BinaryFormatter(); // TODO: Replace BinaryFormatter with a new, secure serializer. formatter.Serialize(stream, message); #pragma warning restore SYSLIB0011 // Type or member is obsolete diff --git a/src/csharp/Microsoft.Spark.Worker/Command/SqlCommandExecutor.cs b/src/csharp/Microsoft.Spark.Worker/Command/SqlCommandExecutor.cs index 91b70381a..8438f3cb5 100644 --- a/src/csharp/Microsoft.Spark.Worker/Command/SqlCommandExecutor.cs +++ b/src/csharp/Microsoft.Spark.Worker/Command/SqlCommandExecutor.cs @@ -329,7 +329,7 @@ protected IEnumerable GetInputIterator(Stream inputStream) // When no input batches were received, return an empty RecordBatch // in order to create and write back the result schema. - int columnCount = reader.Schema.Fields.Count; + int columnCount = reader.Schema.FieldsList.Count; var arrays = new IArrowArray[columnCount]; for (int i = 0; i < columnCount; ++i) { @@ -511,7 +511,7 @@ private IEnumerable> GetArrowInputIterator(Stream in { // When no input batches were received, return empty IArrowArrays // in order to create and write back the result schema. 
- columnCount = reader.Schema.Fields.Count; + columnCount = reader.Schema.FieldsList.Count; arrays = ArrayPool.Shared.Rent(columnCount); for (int i = 0; i < columnCount; ++i) @@ -739,8 +739,8 @@ private RecordBatch WrapColumnsInStructIfApplicable(RecordBatch batch) { if (_version >= new Version(Versions.V3_0_0)) { - var fields = new Field[batch.Schema.Fields.Count]; - for (int i = 0; i < batch.Schema.Fields.Count; ++i) + var fields = new Field[batch.Schema.FieldsList.Count]; + for (int i = 0; i < batch.Schema.FieldsList.Count; ++i) { fields[i] = batch.Schema.GetFieldByIndex(i); } diff --git a/src/csharp/Microsoft.Spark.Worker/Microsoft.Spark.Worker.csproj b/src/csharp/Microsoft.Spark.Worker/Microsoft.Spark.Worker.csproj index cd5e6d0eb..4f371e869 100644 --- a/src/csharp/Microsoft.Spark.Worker/Microsoft.Spark.Worker.csproj +++ b/src/csharp/Microsoft.Spark.Worker/Microsoft.Spark.Worker.csproj @@ -1,9 +1,10 @@  Exe - net461;net6.0 - net6.0 + net48;net8.0 + net8.0 Microsoft.Spark.Worker + true true @@ -11,7 +12,7 @@ - + diff --git a/src/csharp/Microsoft.Spark.Worker/Processor/BroadcastVariableProcessor.cs b/src/csharp/Microsoft.Spark.Worker/Processor/BroadcastVariableProcessor.cs index 353358e44..17b857256 100644 --- a/src/csharp/Microsoft.Spark.Worker/Processor/BroadcastVariableProcessor.cs +++ b/src/csharp/Microsoft.Spark.Worker/Processor/BroadcastVariableProcessor.cs @@ -3,7 +3,6 @@ // See the LICENSE file in the project root for more information. using System; -using System.Diagnostics; using System.IO; using System.Net; using System.Runtime.Serialization.Formatters.Binary; @@ -46,8 +45,9 @@ internal BroadcastVariables Process(Stream stream) broadcastVars.Secret); } } - +#pragma warning disable SYSLIB0011 // Type or member is obsolete var formatter = new BinaryFormatter(); +#pragma warning restore SYSLIB0011 // Type or member is obsolete for (int i = 0; i < broadcastVars.Count; ++i) { long bid = SerDe.ReadInt64(stream); diff --git a/src/csharp/Microsoft.Spark/Microsoft.Spark.csproj b/src/csharp/Microsoft.Spark/Microsoft.Spark.csproj index 6bd14033e..93bd2fbf5 100644 --- a/src/csharp/Microsoft.Spark/Microsoft.Spark.csproj +++ b/src/csharp/Microsoft.Spark/Microsoft.Spark.csproj @@ -10,6 +10,7 @@ .NET for Apache Spark https://github.com/dotnet/spark/tree/master/docs/release-notes spark;dotnet;csharp + true @@ -28,12 +29,13 @@ - - - - - - + + + + + + + From 2c38b082b6830584d0d63d751493f8fed3e7e13c Mon Sep 17 00:00:00 2001 From: "Ihor.Zhuravlov" Date: Fri, 3 Jan 2025 16:20:19 +0100 Subject: [PATCH 2/5] Fixed build failure, logic for bulding path to a test folder, added a few git-ignores. --- .gitignore | 8 +++++++- azure-pipelines-pr.yml | 1 - azure-pipelines.yml | 2 ++ src/csharp/Microsoft.Spark.E2ETest/TestEnvironment.cs | 8 ++++---- 4 files changed, 13 insertions(+), 6 deletions(-) diff --git a/.gitignore b/.gitignore index 5e938f1c2..7b325efc4 100644 --- a/.gitignore +++ b/.gitignore @@ -185,6 +185,7 @@ PublishScripts/ *.snupkg # The packages folder can be ignored because of Package Restore **/[Pp]ackages/* +.packages/* # except build/, which is used as an MSBuild target. 
!**/[Pp]ackages/build/ # Uncomment if necessary however generally it will be regenerated when needed @@ -372,4 +373,9 @@ hs_err_pid* .ionide/ # Mac dev -.DS_Store \ No newline at end of file +.DS_Store + +# Scala intermideate build files +**/.bloop/ +**/.metals/ +**/.bsp/** diff --git a/azure-pipelines-pr.yml b/azure-pipelines-pr.yml index f22b54cc1..975cbe5db 100644 --- a/azure-pipelines-pr.yml +++ b/azure-pipelines-pr.yml @@ -151,7 +151,6 @@ extends: - script: build.cmd -pack -c $(buildConfiguration) -ci - $(_OfficialBuildIdArgs) /p:PublishSparkWorker=true /p:SparkWorkerPublishDir=$(Build.ArtifactStagingDirectory)\Microsoft.Spark.Worker displayName: '.NET build' diff --git a/azure-pipelines.yml b/azure-pipelines.yml index a6ae0b15b..720afbcae 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -133,6 +133,8 @@ stages: variables: ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: _OfficialBuildIdArgs: /p:OfficialBuildId=$(BUILD.BUILDNUMBER) + ${{ else }}: + _OfficialBuildIdArgs: '' steps: - task: DownloadBuildArtifacts@0 diff --git a/src/csharp/Microsoft.Spark.E2ETest/TestEnvironment.cs b/src/csharp/Microsoft.Spark.E2ETest/TestEnvironment.cs index 37e53b490..d8167897c 100644 --- a/src/csharp/Microsoft.Spark.E2ETest/TestEnvironment.cs +++ b/src/csharp/Microsoft.Spark.E2ETest/TestEnvironment.cs @@ -20,10 +20,10 @@ internal static string ResourceDirectory if (s_resourceDirectory is null) { s_resourceDirectory = - AppDomain.CurrentDomain.BaseDirectory + - Path.DirectorySeparatorChar + - "Resources" + - Path.DirectorySeparatorChar; + Path.Combine( + AppDomain.CurrentDomain.BaseDirectory, + "Resources") + + Path.DirectorySeparatorChar; } return s_resourceDirectory; From 0d9b55cccb86a0965d7abe99548b6d2f3d1d7b8e Mon Sep 17 00:00:00 2001 From: "Ihor.Zhuravlov" Date: Fri, 3 Jan 2025 17:24:30 +0100 Subject: [PATCH 3/5] Copy-pasted Arcade 8 common folder --- .../build-configuration.json | 4 + eng/common/CheckSymbols.ps1 | 158 ----- eng/common/PublishToPackageFeed.proj | 83 --- eng/common/PublishToSymbolServers.proj | 82 --- eng/common/SetupNugetSources.ps1 | 38 +- eng/common/SetupNugetSources.sh | 55 +- eng/common/SigningValidation.proj | 83 --- eng/common/SourceLinkValidation.ps1 | 184 ------ eng/common/build.ps1 | 115 ++-- eng/common/build.sh | 63 +- eng/common/cross/android/arm/toolchain.cmake | 41 -- .../cross/android/arm64/toolchain.cmake | 42 -- eng/common/cross/arm/sources.list.focal | 11 + eng/common/cross/arm/sources.list.jammy | 11 + eng/common/cross/arm/sources.list.trusty | 11 - eng/common/cross/arm/sources.list.xenial | 2 +- eng/common/cross/arm/tizen/tizen.patch | 9 + eng/common/cross/arm/trusty-lttng-2.4.patch | 71 --- eng/common/cross/arm/trusty.patch | 97 --- eng/common/cross/arm64/sources.list.focal | 11 + eng/common/cross/arm64/sources.list.jammy | 11 + eng/common/cross/arm64/sources.list.trusty | 11 - eng/common/cross/arm64/sources.list.xenial | 2 +- eng/common/cross/arm64/tizen/tizen.patch | 9 + eng/common/cross/armel/armel.jessie.patch | 43 ++ eng/common/cross/armel/tizen-fetch.sh | 171 ----- eng/common/cross/armel/tizen/tizen-dotnet.ks | 50 -- eng/common/cross/armel/tizen/tizen.patch | 9 - eng/common/cross/armv6/sources.list.buster | 2 + eng/common/cross/build-android-rootfs.sh | 108 ++-- eng/common/cross/build-rootfs.sh | 602 +++++++++++++++--- eng/common/cross/ppc64le/sources.list.bionic | 11 + eng/common/cross/riscv64/sources.list.sid | 1 + eng/common/cross/s390x/sources.list.bionic | 11 + .../cross/{armel 
=> }/tizen-build-rootfs.sh | 43 +- eng/common/cross/tizen-fetch.sh | 172 +++++ eng/common/cross/toolchain.cmake | 354 ++++++++-- eng/common/darc-init.ps1 | 2 +- eng/common/darc-init.sh | 8 +- eng/common/dotnet-install.ps1 | 25 +- eng/common/dotnet-install.sh | 43 +- eng/common/enable-cross-org-publishing.ps1 | 11 +- eng/common/generate-graph-files.ps1 | 87 --- eng/common/generate-locproject.ps1 | 189 ++++++ eng/common/generate-sbom-prep.ps1 | 21 + eng/common/generate-sbom-prep.sh | 34 + eng/common/init-tools-native.ps1 | 170 +++-- eng/common/init-tools-native.sh | 163 ++++- eng/common/internal-feed-operations.ps1 | 35 +- eng/common/internal-feed-operations.sh | 9 +- eng/common/internal/Directory.Build.props | 2 +- eng/common/internal/NuGet.config | 7 + eng/common/internal/Tools.csproj | 7 +- eng/common/loc/P22DotNetHtmlLocalization.lss | 29 + eng/common/msbuild.ps1 | 7 +- eng/common/msbuild.sh | 2 +- eng/common/native/CommonLibrary.psm1 | 29 +- eng/common/native/common-library.sh | 16 +- eng/common/native/init-compiler.sh | 137 ++++ eng/common/native/init-distro-rid.sh | 130 ++++ eng/common/native/init-os-and-arch.sh | 80 +++ eng/common/native/install-cmake-test.sh | 10 +- eng/common/native/install-cmake.sh | 10 +- eng/common/native/install-tool.ps1 | 12 +- eng/common/performance/perfhelixpublish.proj | 102 --- eng/common/performance/performance-setup.ps1 | 106 --- eng/common/performance/performance-setup.sh | 216 ------- eng/common/pipeline-logging-functions.ps1 | 150 +++-- eng/common/pipeline-logging-functions.sh | 60 +- ...ote-build.ps1 => add-build-to-channel.ps1} | 20 +- .../post-build/check-channel-consistency.ps1 | 40 ++ eng/common/post-build/darc-gather-drop.ps1 | 45 -- eng/common/post-build/nuget-validation.ps1 | 11 +- eng/common/post-build/post-build-utils.ps1 | 33 +- eng/common/post-build/publish-using-darc.ps1 | 53 ++ eng/common/post-build/setup-maestro-vars.ps1 | 26 - .../post-build/sourcelink-validation.ps1 | 168 +++-- eng/common/post-build/symbols-validation.ps1 | 322 +++++++--- .../post-build/trigger-subscriptions.ps1 | 87 +-- eng/common/retain-build.ps1 | 45 ++ eng/common/sdk-task.ps1 | 40 +- eng/common/sdl/NuGet.config | 7 +- eng/common/sdl/configure-sdl-tool.ps1 | 130 ++++ eng/common/sdl/execute-all-sdl-tools.ps1 | 227 ++++--- eng/common/sdl/extract-artifact-archives.ps1 | 63 ++ eng/common/sdl/extract-artifact-packages.ps1 | 99 +-- eng/common/sdl/init-sdl.ps1 | 54 +- eng/common/sdl/packages.config | 2 +- eng/common/sdl/push-gdn.ps1 | 51 -- eng/common/sdl/run-sdl.ps1 | 78 +-- eng/common/sdl/sdl.ps1 | 40 ++ eng/common/sdl/trim-assets-version.ps1 | 75 +++ eng/common/templates-official/job/job.yml | 264 ++++++++ .../templates-official/job/onelocbuild.yml | 112 ++++ .../job/publish-build-assets.yml | 160 +++++ .../templates-official/job/source-build.yml | 75 +++ .../job/source-index-stage1.yml | 83 +++ .../templates-official/jobs/codeql-build.yml | 31 + eng/common/templates-official/jobs/jobs.yml | 97 +++ .../templates-official/jobs/source-build.yml | 54 ++ .../post-build/common-variables.yml | 22 + .../post-build/post-build.yml | 287 +++++++++ .../post-build/setup-maestro-vars.yml | 70 ++ .../post-build/trigger-subscription.yml | 13 + .../steps/add-build-to-channel.yml} | 4 +- .../templates-official/steps/build-reason.yml | 12 + .../steps/component-governance.yml | 13 + .../steps/enable-internal-runtimes.yml | 28 + .../steps/execute-codeql.yml | 32 + .../templates-official/steps/execute-sdl.yml | 86 +++ .../steps/generate-sbom.yml | 48 ++ .../steps/get-delegation-sas.yml | 
52 ++ .../steps/get-federated-access-token.yml | 40 ++ .../templates-official/steps/publish-logs.yml | 23 + .../templates-official/steps/retain-build.yml | 28 + .../steps/send-to-helix.yml} | 43 +- .../templates-official/steps/source-build.yml | 129 ++++ .../variables/pool-providers.yml | 45 ++ .../variables/sdl-variables.yml | 7 + eng/common/templates/job/execute-sdl.yml | 138 ++-- .../templates/job/generate-graph-files.yml | 48 -- eng/common/templates/job/job.yml | 233 ++++--- eng/common/templates/job/onelocbuild.yml | 109 ++++ eng/common/templates/job/performance.yml | 95 --- .../templates/job/publish-build-assets.yml | 115 +++- eng/common/templates/job/source-build.yml | 74 +++ .../templates/job/source-index-stage1.yml | 82 +++ eng/common/templates/jobs/codeql-build.yml | 31 + eng/common/templates/jobs/jobs.yml | 97 +-- eng/common/templates/jobs/source-build.yml | 54 ++ eng/common/templates/phases/base.yml | 130 ---- .../templates/phases/publish-build-assets.yml | 51 -- .../channels/generic-internal-channel.yml | 139 ---- .../channels/generic-public-channel.yml | 148 ----- .../templates/post-build/common-variables.yml | 66 +- .../templates/post-build/darc-gather-drop.yml | 23 - .../templates/post-build/post-build.yml | 547 +++++++--------- .../templates/post-build/promote-build.yml | 25 - .../post-build/setup-maestro-vars.yml | 76 ++- .../templates/steps/add-build-to-channel.yml | 13 + .../templates/steps/component-governance.yml | 13 + .../steps/enable-internal-runtimes.yml | 28 + eng/common/templates/steps/execute-codeql.yml | 32 + eng/common/templates/steps/execute-sdl.yml | 89 +++ eng/common/templates/steps/generate-sbom.yml | 48 ++ .../templates/steps/get-delegation-sas.yml | 52 ++ .../steps/get-federated-access-token.yml | 40 ++ eng/common/templates/steps/publish-logs.yml | 23 + eng/common/templates/steps/retain-build.yml | 28 + eng/common/templates/steps/send-to-helix.yml | 20 +- eng/common/templates/steps/source-build.yml | 129 ++++ .../templates/steps/telemetry-start.yml | 2 +- .../templates/variables/pool-providers.yml | 57 ++ .../templates/variables/sdl-variables.yml | 7 + eng/common/tools.ps1 | 565 ++++++++++++---- eng/common/tools.sh | 332 +++++++--- 156 files changed, 7784 insertions(+), 4239 deletions(-) create mode 100644 eng/common/BuildConfiguration/build-configuration.json delete mode 100644 eng/common/CheckSymbols.ps1 delete mode 100644 eng/common/PublishToPackageFeed.proj delete mode 100644 eng/common/PublishToSymbolServers.proj delete mode 100644 eng/common/SigningValidation.proj delete mode 100644 eng/common/SourceLinkValidation.ps1 delete mode 100644 eng/common/cross/android/arm/toolchain.cmake delete mode 100644 eng/common/cross/android/arm64/toolchain.cmake create mode 100644 eng/common/cross/arm/sources.list.focal create mode 100644 eng/common/cross/arm/sources.list.jammy delete mode 100644 eng/common/cross/arm/sources.list.trusty create mode 100644 eng/common/cross/arm/tizen/tizen.patch delete mode 100644 eng/common/cross/arm/trusty-lttng-2.4.patch delete mode 100644 eng/common/cross/arm/trusty.patch create mode 100644 eng/common/cross/arm64/sources.list.focal create mode 100644 eng/common/cross/arm64/sources.list.jammy delete mode 100644 eng/common/cross/arm64/sources.list.trusty create mode 100644 eng/common/cross/arm64/tizen/tizen.patch create mode 100644 eng/common/cross/armel/armel.jessie.patch delete mode 100755 eng/common/cross/armel/tizen-fetch.sh delete mode 100644 eng/common/cross/armel/tizen/tizen-dotnet.ks create mode 100644 
eng/common/cross/armv6/sources.list.buster create mode 100644 eng/common/cross/ppc64le/sources.list.bionic create mode 100644 eng/common/cross/riscv64/sources.list.sid create mode 100644 eng/common/cross/s390x/sources.list.bionic rename eng/common/cross/{armel => }/tizen-build-rootfs.sh (51%) create mode 100755 eng/common/cross/tizen-fetch.sh delete mode 100644 eng/common/generate-graph-files.ps1 create mode 100644 eng/common/generate-locproject.ps1 create mode 100644 eng/common/generate-sbom-prep.ps1 create mode 100755 eng/common/generate-sbom-prep.sh create mode 100644 eng/common/internal/NuGet.config create mode 100644 eng/common/loc/P22DotNetHtmlLocalization.lss create mode 100755 eng/common/native/init-compiler.sh create mode 100755 eng/common/native/init-distro-rid.sh create mode 100755 eng/common/native/init-os-and-arch.sh delete mode 100644 eng/common/performance/perfhelixpublish.proj delete mode 100644 eng/common/performance/performance-setup.ps1 delete mode 100755 eng/common/performance/performance-setup.sh rename eng/common/post-build/{promote-build.ps1 => add-build-to-channel.ps1} (56%) create mode 100644 eng/common/post-build/check-channel-consistency.ps1 delete mode 100644 eng/common/post-build/darc-gather-drop.ps1 create mode 100644 eng/common/post-build/publish-using-darc.ps1 delete mode 100644 eng/common/post-build/setup-maestro-vars.ps1 create mode 100644 eng/common/retain-build.ps1 create mode 100644 eng/common/sdl/configure-sdl-tool.ps1 create mode 100644 eng/common/sdl/extract-artifact-archives.ps1 delete mode 100644 eng/common/sdl/push-gdn.ps1 create mode 100644 eng/common/sdl/sdl.ps1 create mode 100644 eng/common/sdl/trim-assets-version.ps1 create mode 100644 eng/common/templates-official/job/job.yml create mode 100644 eng/common/templates-official/job/onelocbuild.yml create mode 100644 eng/common/templates-official/job/publish-build-assets.yml create mode 100644 eng/common/templates-official/job/source-build.yml create mode 100644 eng/common/templates-official/job/source-index-stage1.yml create mode 100644 eng/common/templates-official/jobs/codeql-build.yml create mode 100644 eng/common/templates-official/jobs/jobs.yml create mode 100644 eng/common/templates-official/jobs/source-build.yml create mode 100644 eng/common/templates-official/post-build/common-variables.yml create mode 100644 eng/common/templates-official/post-build/post-build.yml create mode 100644 eng/common/templates-official/post-build/setup-maestro-vars.yml create mode 100644 eng/common/templates-official/post-build/trigger-subscription.yml rename eng/common/{templates/steps/promote-build.yml => templates-official/steps/add-build-to-channel.yml} (68%) create mode 100644 eng/common/templates-official/steps/build-reason.yml create mode 100644 eng/common/templates-official/steps/component-governance.yml create mode 100644 eng/common/templates-official/steps/enable-internal-runtimes.yml create mode 100644 eng/common/templates-official/steps/execute-codeql.yml create mode 100644 eng/common/templates-official/steps/execute-sdl.yml create mode 100644 eng/common/templates-official/steps/generate-sbom.yml create mode 100644 eng/common/templates-official/steps/get-delegation-sas.yml create mode 100644 eng/common/templates-official/steps/get-federated-access-token.yml create mode 100644 eng/common/templates-official/steps/publish-logs.yml create mode 100644 eng/common/templates-official/steps/retain-build.yml rename eng/common/{templates/steps/perf-send-to-helix.yml => 
templates-official/steps/send-to-helix.yml} (57%) create mode 100644 eng/common/templates-official/steps/source-build.yml create mode 100644 eng/common/templates-official/variables/pool-providers.yml create mode 100644 eng/common/templates-official/variables/sdl-variables.yml delete mode 100644 eng/common/templates/job/generate-graph-files.yml create mode 100644 eng/common/templates/job/onelocbuild.yml delete mode 100644 eng/common/templates/job/performance.yml create mode 100644 eng/common/templates/job/source-build.yml create mode 100644 eng/common/templates/job/source-index-stage1.yml create mode 100644 eng/common/templates/jobs/codeql-build.yml create mode 100644 eng/common/templates/jobs/source-build.yml delete mode 100644 eng/common/templates/phases/base.yml delete mode 100644 eng/common/templates/phases/publish-build-assets.yml delete mode 100644 eng/common/templates/post-build/channels/generic-internal-channel.yml delete mode 100644 eng/common/templates/post-build/channels/generic-public-channel.yml delete mode 100644 eng/common/templates/post-build/darc-gather-drop.yml delete mode 100644 eng/common/templates/post-build/promote-build.yml create mode 100644 eng/common/templates/steps/add-build-to-channel.yml create mode 100644 eng/common/templates/steps/component-governance.yml create mode 100644 eng/common/templates/steps/enable-internal-runtimes.yml create mode 100644 eng/common/templates/steps/execute-codeql.yml create mode 100644 eng/common/templates/steps/execute-sdl.yml create mode 100644 eng/common/templates/steps/generate-sbom.yml create mode 100644 eng/common/templates/steps/get-delegation-sas.yml create mode 100644 eng/common/templates/steps/get-federated-access-token.yml create mode 100644 eng/common/templates/steps/publish-logs.yml create mode 100644 eng/common/templates/steps/retain-build.yml create mode 100644 eng/common/templates/steps/source-build.yml create mode 100644 eng/common/templates/variables/pool-providers.yml create mode 100644 eng/common/templates/variables/sdl-variables.yml diff --git a/eng/common/BuildConfiguration/build-configuration.json b/eng/common/BuildConfiguration/build-configuration.json new file mode 100644 index 000000000..3d1cc8989 --- /dev/null +++ b/eng/common/BuildConfiguration/build-configuration.json @@ -0,0 +1,4 @@ +{ + "RetryCountLimit": 1, + "RetryByAnyError": false +} diff --git a/eng/common/CheckSymbols.ps1 b/eng/common/CheckSymbols.ps1 deleted file mode 100644 index b8d84607b..000000000 --- a/eng/common/CheckSymbols.ps1 +++ /dev/null @@ -1,158 +0,0 @@ -param( - [Parameter(Mandatory=$true)][string] $InputPath, # Full path to directory where NuGet packages to be checked are stored - [Parameter(Mandatory=$true)][string] $ExtractPath, # Full path to directory where the packages will be extracted during validation - [Parameter(Mandatory=$true)][string] $SymbolToolPath # Full path to directory where dotnet symbol-tool was installed -) - -Add-Type -AssemblyName System.IO.Compression.FileSystem - -function FirstMatchingSymbolDescriptionOrDefault { - param( - [string] $FullPath, # Full path to the module that has to be checked - [string] $TargetServerParam, # Parameter to pass to `Symbol Tool` indicating the server to lookup for symbols - [string] $SymbolsPath - ) - - $FileName = [System.IO.Path]::GetFileName($FullPath) - $Extension = [System.IO.Path]::GetExtension($FullPath) - - # Those below are potential symbol files that the `dotnet symbol` might - # return. 
Which one will be returned depend on the type of file we are - # checking and which type of file was uploaded. - - # The file itself is returned - $SymbolPath = $SymbolsPath + "\" + $FileName - - # PDB file for the module - $PdbPath = $SymbolPath.Replace($Extension, ".pdb") - - # PDB file for R2R module (created by crossgen) - $NGenPdb = $SymbolPath.Replace($Extension, ".ni.pdb") - - # DBG file for a .so library - $SODbg = $SymbolPath.Replace($Extension, ".so.dbg") - - # DWARF file for a .dylib - $DylibDwarf = $SymbolPath.Replace($Extension, ".dylib.dwarf") - - .\dotnet-symbol.exe --symbols --modules --windows-pdbs $TargetServerParam $FullPath -o $SymbolsPath | Out-Null - - if (Test-Path $PdbPath) { - return "PDB" - } - elseif (Test-Path $NGenPdb) { - return "NGen PDB" - } - elseif (Test-Path $SODbg) { - return "DBG for SO" - } - elseif (Test-Path $DylibDwarf) { - return "Dwarf for Dylib" - } - elseif (Test-Path $SymbolPath) { - return "Module" - } - else { - return $null - } -} - -function CountMissingSymbols { - param( - [string] $PackagePath # Path to a NuGet package - ) - - # Ensure input file exist - if (!(Test-Path $PackagePath)) { - throw "Input file does not exist: $PackagePath" - } - - # Extensions for which we'll look for symbols - $RelevantExtensions = @(".dll", ".exe", ".so", ".dylib") - - # How many files are missing symbol information - $MissingSymbols = 0 - - $PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath) - $PackageGuid = New-Guid - $ExtractPath = Join-Path -Path $ExtractPath -ChildPath $PackageGuid - $SymbolsPath = Join-Path -Path $ExtractPath -ChildPath "Symbols" - - [System.IO.Compression.ZipFile]::ExtractToDirectory($PackagePath, $ExtractPath) - - # Makes easier to reference `symbol tool` - Push-Location $SymbolToolPath - - Get-ChildItem -Recurse $ExtractPath | - Where-Object {$RelevantExtensions -contains $_.Extension} | - ForEach-Object { - if ($_.FullName -Match "\\ref\\") { - Write-Host "`t Ignoring reference assembly file" $_.FullName - return - } - - $SymbolsOnMSDL = FirstMatchingSymbolDescriptionOrDefault $_.FullName "--microsoft-symbol-server" $SymbolsPath - $SymbolsOnSymWeb = FirstMatchingSymbolDescriptionOrDefault $_.FullName "--internal-server" $SymbolsPath - - Write-Host -NoNewLine "`t Checking file" $_.FullName "... " - - if ($SymbolsOnMSDL -ne $null -and $SymbolsOnSymWeb -ne $null) { - Write-Host "Symbols found on MSDL (" $SymbolsOnMSDL ") and SymWeb (" $SymbolsOnSymWeb ")" - } - else { - $MissingSymbols++ - - if ($SymbolsOnMSDL -eq $null -and $SymbolsOnSymWeb -eq $null) { - Write-Host "No symbols found on MSDL or SymWeb!" - } - else { - if ($SymbolsOnMSDL -eq $null) { - Write-Host "No symbols found on MSDL!" - } - else { - Write-Host "No symbols found on SymWeb!" - } - } - } - } - - Pop-Location - - return $MissingSymbols -} - -function CheckSymbolsAvailable { - if (Test-Path $ExtractPath) { - Remove-Item $ExtractPath -Force -Recurse -ErrorAction SilentlyContinue - } - - Get-ChildItem "$InputPath\*.nupkg" | - ForEach-Object { - $FileName = $_.Name - - # These packages from Arcade-Services include some native libraries that - # our current symbol uploader can't handle. Below is a workaround until - # we get issue: https://github.com/dotnet/arcade/issues/2457 sorted. 
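# [Editor's note - illustrative sketch, not part of the original patch.] The two
# -Match checks below are a hard-coded package-name filter. The same skip logic,
# written as a data-driven loop over a hypothetical $SkipPatterns list, would be:
#   $SkipPatterns = @('Microsoft\.DotNet\.Darc\.', 'Microsoft\.DotNet\.Maestro\.Tasks\.')
#   if ($SkipPatterns | Where-Object { $FileName -Match $_ }) {
#       Write-Host "Ignoring Arcade-services file: $FileName"
#       return
#   }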
- if ($FileName -Match "Microsoft\.DotNet\.Darc\.") { - Write-Host "Ignoring Arcade-services file: $FileName" - Write-Host - return - } - elseif ($FileName -Match "Microsoft\.DotNet\.Maestro\.Tasks\.") { - Write-Host "Ignoring Arcade-services file: $FileName" - Write-Host - return - } - - Write-Host "Validating $FileName " - $Status = CountMissingSymbols "$InputPath\$FileName" - - if ($Status -ne 0) { - Write-Error "Missing symbols for $Status modules in the package $FileName" - } - - Write-Host - } -} - -CheckSymbolsAvailable diff --git a/eng/common/PublishToPackageFeed.proj b/eng/common/PublishToPackageFeed.proj deleted file mode 100644 index a1b133372..000000000 --- a/eng/common/PublishToPackageFeed.proj +++ /dev/null @@ -1,83 +0,0 @@ - - - - - - netcoreapp2.1 - - - - - - - - - - - - - - - - - - - - - - - - - https://dotnetfeed.blob.core.windows.net/dotnet-core/index.json - https://dotnetfeed.blob.core.windows.net/arcade-validation/index.json - https://dotnetfeed.blob.core.windows.net/aspnet-aspnetcore/index.json - https://dotnetfeed.blob.core.windows.net/aspnet-aspnetcore-tooling/index.json - https://dotnetfeed.blob.core.windows.net/aspnet-entityframeworkcore/index.json - https://dotnetfeed.blob.core.windows.net/aspnet-extensions/index.json - https://dotnetfeed.blob.core.windows.net/dotnet-coreclr/index.json - https://dotnetfeed.blob.core.windows.net/dotnet-sdk/index.json - https://dotnetfeed.blob.core.windows.net/dotnet-tools-internal/index.json - https://dotnetfeed.blob.core.windows.net/dotnet-toolset/index.json - https://dotnetfeed.blob.core.windows.net/dotnet-windowsdesktop/index.json - https://dotnetfeed.blob.core.windows.net/nuget-nugetclient/index.json - https://dotnetfeed.blob.core.windows.net/aspnet-entityframework6/index.json - https://dotnetfeed.blob.core.windows.net/aspnet-blazor/index.json - - - - - - - - - - - - diff --git a/eng/common/PublishToSymbolServers.proj b/eng/common/PublishToSymbolServers.proj deleted file mode 100644 index 5d55e312b..000000000 --- a/eng/common/PublishToSymbolServers.proj +++ /dev/null @@ -1,82 +0,0 @@ - - - - - - netcoreapp2.1 - - - - - - - - - - - - - - - - 3650 - true - false - - - - - - - - - - - - - - - - - diff --git a/eng/common/SetupNugetSources.ps1 b/eng/common/SetupNugetSources.ps1 index 83218ad7e..efa2fd72b 100644 --- a/eng/common/SetupNugetSources.ps1 +++ b/eng/common/SetupNugetSources.ps1 @@ -35,7 +35,7 @@ Set-StrictMode -Version 2.0 . $PSScriptRoot\tools.ps1 # Add source entry to PackageSources -function AddPackageSource($sources, $SourceName, $SourceEndPoint, $creds, $Username, $Password) { +function AddPackageSource($sources, $SourceName, $SourceEndPoint, $creds, $Username, $pwd) { $packageSource = $sources.SelectSingleNode("add[@key='$SourceName']") if ($packageSource -eq $null) @@ -48,12 +48,11 @@ function AddPackageSource($sources, $SourceName, $SourceEndPoint, $creds, $Usern else { Write-Host "Package source $SourceName already present." } - - AddCredential -Creds $creds -Source $SourceName -Username $Username -Password $Password + AddCredential -Creds $creds -Source $SourceName -Username $Username -pwd $pwd } # Add a credential node for the specified source -function AddCredential($creds, $source, $username, $password) { +function AddCredential($creds, $source, $username, $pwd) { # Looks for credential configuration for the given SourceName. Create it if none is found. 
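# [Editor's note - illustrative, not part of the original patch.] The nodes this
# function creates correspond to a NuGet.config fragment of roughly this shape
# (the feed name "dotnet8-internal" is just an example produced by the version
# loop added further down; "dn-bot" is the $userName set in this script):
#   <packageSourceCredentials>
#     <dotnet8-internal>
#       <add key="Username" value="dn-bot" />
#       <add key="ClearTextPassword" value="..." />
#     </dotnet8-internal>
#   </packageSourceCredentials>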
$sourceElement = $creds.SelectSingleNode($Source) if ($sourceElement -eq $null) @@ -82,17 +81,18 @@ function AddCredential($creds, $source, $username, $password) { $passwordElement.SetAttribute("key", "ClearTextPassword") $sourceElement.AppendChild($passwordElement) | Out-Null } - $passwordElement.SetAttribute("value", $Password) + + $passwordElement.SetAttribute("value", $pwd) } -function InsertMaestroPrivateFeedCredentials($Sources, $Creds, $Username, $Password) { +function InsertMaestroPrivateFeedCredentials($Sources, $Creds, $Username, $pwd) { $maestroPrivateSources = $Sources.SelectNodes("add[contains(@key,'darc-int')]") Write-Host "Inserting credentials for $($maestroPrivateSources.Count) Maestro's private feeds." ForEach ($PackageSource in $maestroPrivateSources) { Write-Host "`tInserting credential for Maestro's feed:" $PackageSource.Key - AddCredential -Creds $creds -Source $PackageSource.Key -Username $Username -Password $Password + AddCredential -Creds $creds -Source $PackageSource.Key -Username $Username -pwd $pwd } } @@ -144,18 +144,24 @@ if ($disabledSources -ne $null) { $userName = "dn-bot" # Insert credential nodes for Maestro's private feeds -InsertMaestroPrivateFeedCredentials -Sources $sources -Creds $creds -Username $userName -Password $Password - -$dotnet3Source = $sources.SelectSingleNode("add[@key='dotnet3']") -if ($dotnet3Source -ne $null) { - AddPackageSource -Sources $sources -SourceName "dotnet3-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3-internal/nuget/v2" -Creds $creds -Username $userName -Password $Password - AddPackageSource -Sources $sources -SourceName "dotnet3-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3-internal-transport/nuget/v2" -Creds $creds -Username $userName -Password $Password -} +InsertMaestroPrivateFeedCredentials -Sources $sources -Creds $creds -Username $userName -pwd $Password +# 3.1 uses a different feed url format so it's handled differently here $dotnet31Source = $sources.SelectSingleNode("add[@key='dotnet3.1']") if ($dotnet31Source -ne $null) { - AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal/nuget/v2" -Creds $creds -Username $userName -Password $Password - AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v2" -Creds $creds -Username $userName -Password $Password + AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal/nuget/v2" -Creds $creds -Username $userName -pwd $Password + AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v2" -Creds $creds -Username $userName -pwd $Password +} + +$dotnetVersions = @('5','6','7','8') + +foreach ($dotnetVersion in $dotnetVersions) { + $feedPrefix = "dotnet" + $dotnetVersion; + $dotnetSource = $sources.SelectSingleNode("add[@key='$feedPrefix']") + if ($dotnetSource -ne $null) { + AddPackageSource -Sources $sources -SourceName "$feedPrefix-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$feedPrefix-internal/nuget/v2" -Creds $creds -Username $userName -pwd $Password + AddPackageSource -Sources $sources -SourceName "$feedPrefix-internal-transport" 
-SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$feedPrefix-internal-transport/nuget/v2" -Creds $creds -Username $userName -pwd $Password + } } $doc.Save($filename) \ No newline at end of file diff --git a/eng/common/SetupNugetSources.sh b/eng/common/SetupNugetSources.sh index 751863d50..d387c7eac 100755 --- a/eng/common/SetupNugetSources.sh +++ b/eng/common/SetupNugetSources.sh @@ -81,31 +81,6 @@ fi PackageSources=() -# Ensure dotnet3-internal and dotnet3-internal-transport are in the packageSources if the public dotnet3 feeds are present -grep -i "" $ConfigFile - if [ "$?" != "0" ]; then - echo "Adding dotnet3-internal to the packageSources." - PackageSourcesNodeFooter="" - PackageSourceTemplate="${TB}" - - sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile - fi - PackageSources+=('dotnet3-internal') - - grep -i "" - - sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile - fi - PackageSources+=('dotnet3-internal-transport') -fi - # Ensure dotnet3.1-internal and dotnet3.1-internal-transport are in the packageSources if the public dotnet3.1 feeds are present grep -i "" + + sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile + fi + PackageSources+=("$FeedPrefix-internal") + + grep -i "" $ConfigFile + if [ "$?" != "0" ]; then + echo "Adding $FeedPrefix-internal-transport to the packageSources." + PackageSourcesNodeFooter="" + PackageSourceTemplate="${TB}" + + sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile + fi + PackageSources+=("$FeedPrefix-internal-transport") + fi +done + # I want things split line by line PrevIFS=$IFS IFS=$'\n' @@ -165,4 +168,4 @@ if [ "$?" == "0" ]; then echo "Neutralized disablePackageSources entry for '$DisabledSourceName'" fi done -fi \ No newline at end of file +fi diff --git a/eng/common/SigningValidation.proj b/eng/common/SigningValidation.proj deleted file mode 100644 index 3d0ac80af..000000000 --- a/eng/common/SigningValidation.proj +++ /dev/null @@ -1,83 +0,0 @@ - - - - - - netcoreapp2.1 - - - - - - - - $(NuGetPackageRoot)Microsoft.DotNet.SignCheck\$(SignCheckVersion)\tools\Microsoft.DotNet.SignCheck.exe - - $(PackageBasePath) - signcheck.log - signcheck.errors.log - signcheck.exclusions.txt - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/eng/common/SourceLinkValidation.ps1 b/eng/common/SourceLinkValidation.ps1 deleted file mode 100644 index cb2d28cb9..000000000 --- a/eng/common/SourceLinkValidation.ps1 +++ /dev/null @@ -1,184 +0,0 @@ -param( - [Parameter(Mandatory=$true)][string] $InputPath, # Full path to directory where Symbols.NuGet packages to be checked are stored - [Parameter(Mandatory=$true)][string] $ExtractPath, # Full path to directory where the packages will be extracted during validation - [Parameter(Mandatory=$true)][string] $SourceLinkToolPath, # Full path to directory where dotnet SourceLink CLI was installed - [Parameter(Mandatory=$true)][string] $GHRepoName, # GitHub name of the repo including the Org. E.g., dotnet/arcade - [Parameter(Mandatory=$true)][string] $GHCommit # GitHub commit SHA used to build the packages -) - -# Cache/HashMap (File -> Exist flag) used to consult whether a file exist -# in the repository at a specific commit point. This is populated by inserting -# all files present in the repo at a specific commit point. 
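# [Editor's note - minimal sketch, not part of the original patch.] The cache
# described in the comment above amounts to: populate a hashtable once from the
# GitHub tree API, then treat presence of a key as "file exists at this commit",
# falling back to an HTTP check only on a miss. With hypothetical values:
#   $RepoFiles = @{ 'src/Program.cs' = 1 }   # filled from the repo tree response
#   $FilePath  = 'src/Program.cs'
#   if ($RepoFiles.ContainsKey($FilePath)) { 'hit' } else { 'miss - validate via HTTP' }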
-$global:RepoFiles = @{} - -$ValidatePackage = { - param( - [string] $PackagePath # Full path to a Symbols.NuGet package - ) - - # Ensure input file exist - if (!(Test-Path $PackagePath)) { - throw "Input file does not exist: $PackagePath" - } - - # Extensions for which we'll look for SourceLink information - # For now we'll only care about Portable & Embedded PDBs - $RelevantExtensions = @(".dll", ".exe", ".pdb") - - Write-Host -NoNewLine "Validating" ([System.IO.Path]::GetFileName($PackagePath)) "... " - - $PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath) - $ExtractPath = Join-Path -Path $using:ExtractPath -ChildPath $PackageId - $FailedFiles = 0 - - Add-Type -AssemblyName System.IO.Compression.FileSystem - - [System.IO.Directory]::CreateDirectory($ExtractPath); - - $zip = [System.IO.Compression.ZipFile]::OpenRead($PackagePath) - - $zip.Entries | - Where-Object {$RelevantExtensions -contains [System.IO.Path]::GetExtension($_.Name)} | - ForEach-Object { - $FileName = $_.FullName - $Extension = [System.IO.Path]::GetExtension($_.Name) - $FakeName = -Join((New-Guid), $Extension) - $TargetFile = Join-Path -Path $ExtractPath -ChildPath $FakeName - - # We ignore resource DLLs - if ($FileName.EndsWith(".resources.dll")) { - return - } - - [System.IO.Compression.ZipFileExtensions]::ExtractToFile($_, $TargetFile, $true) - - $ValidateFile = { - param( - [string] $FullPath, # Full path to the module that has to be checked - [string] $RealPath, - [ref] $FailedFiles - ) - - # Makes easier to reference `sourcelink cli` - Push-Location $using:SourceLinkToolPath - - $SourceLinkInfos = .\sourcelink.exe print-urls $FullPath | Out-String - - if ($LASTEXITCODE -eq 0 -and -not ([string]::IsNullOrEmpty($SourceLinkInfos))) { - $NumFailedLinks = 0 - - # We only care about Http addresses - $Matches = (Select-String '(http[s]?)(:\/\/)([^\s,]+)' -Input $SourceLinkInfos -AllMatches).Matches - - if ($Matches.Count -ne 0) { - $Matches.Value | - ForEach-Object { - $Link = $_ - $CommitUrl = -Join("https://raw.githubusercontent.com/", $using:GHRepoName, "/", $using:GHCommit, "/") - $FilePath = $Link.Replace($CommitUrl, "") - $Status = 200 - $Cache = $using:RepoFiles - - if ( !($Cache.ContainsKey($FilePath)) ) { - try { - $Uri = $Link -as [System.URI] - - # Only GitHub links are valid - if ($Uri.AbsoluteURI -ne $null -and $Uri.Host -match "github") { - $Status = (Invoke-WebRequest -Uri $Link -UseBasicParsing -Method HEAD -TimeoutSec 5).StatusCode - } - else { - $Status = 0 - } - } - catch { - $Status = 0 - } - } - - if ($Status -ne 200) { - if ($NumFailedLinks -eq 0) { - if ($FailedFiles.Value -eq 0) { - Write-Host - } - - Write-Host "`tFile $RealPath has broken links:" - } - - Write-Host "`t`tFailed to retrieve $Link" - - $NumFailedLinks++ - } - } - } - - if ($NumFailedLinks -ne 0) { - $FailedFiles.value++ - $global:LASTEXITCODE = 1 - } - } - - Pop-Location - } - - &$ValidateFile $TargetFile $FileName ([ref]$FailedFiles) - } - - $zip.Dispose() - - if ($FailedFiles -eq 0) { - Write-Host "Passed." 
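# [Editor's note - illustrative, not part of the original patch.] This script
# block is executed out-of-process via Start-Job (see ValidateSourceLinkLinks
# below), which is why outer variables are read through the $using: scope
# modifier. A self-contained sketch of the same pattern:
#   $ExtractPath = 'C:\temp\extract'
#   $job = Start-Job -ScriptBlock { "extracting to $using:ExtractPath" }
#   Wait-Job $job | Receive-Job   # -> extracting to C:\temp\extract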
- } -} - -function ValidateSourceLinkLinks { - if (!($GHRepoName -Match "^[^\s\/]+/[^\s\/]+$")) { - Write-Host "GHRepoName should be in the format /" - $global:LASTEXITCODE = 1 - return - } - - if (!($GHCommit -Match "^[0-9a-fA-F]{40}$")) { - Write-Host "GHCommit should be a 40 chars hexadecimal string" - $global:LASTEXITCODE = 1 - return - } - - $RepoTreeURL = -Join("https://api.github.com/repos/", $GHRepoName, "/git/trees/", $GHCommit, "?recursive=1") - $CodeExtensions = @(".cs", ".vb", ".fs", ".fsi", ".fsx", ".fsscript") - - try { - # Retrieve the list of files in the repo at that particular commit point and store them in the RepoFiles hash - $Data = Invoke-WebRequest $RepoTreeURL | ConvertFrom-Json | Select-Object -ExpandProperty tree - - foreach ($file in $Data) { - $Extension = [System.IO.Path]::GetExtension($file.path) - - if ($CodeExtensions.Contains($Extension)) { - $RepoFiles[$file.path] = 1 - } - } - } - catch { - Write-Host "Problems downloading the list of files from the repo. Url used: $RepoTreeURL" - $global:LASTEXITCODE = 1 - return - } - - if (Test-Path $ExtractPath) { - Remove-Item $ExtractPath -Force -Recurse -ErrorAction SilentlyContinue - } - - # Process each NuGet package in parallel - $Jobs = @() - Get-ChildItem "$InputPath\*.symbols.nupkg" | - ForEach-Object { - $Jobs += Start-Job -ScriptBlock $ValidatePackage -ArgumentList $_.FullName - } - - foreach ($Job in $Jobs) { - Wait-Job -Id $Job.Id | Receive-Job - } -} - -Measure-Command { ValidateSourceLinkLinks } diff --git a/eng/common/build.ps1 b/eng/common/build.ps1 index e001ccb48..33a6f2d0e 100644 --- a/eng/common/build.ps1 +++ b/eng/common/build.ps1 @@ -18,56 +18,71 @@ Param( [switch] $sign, [switch] $pack, [switch] $publish, + [switch] $clean, [switch][Alias('bl')]$binaryLog, + [switch][Alias('nobl')]$excludeCIBinarylog, [switch] $ci, [switch] $prepareMachine, + [string] $runtimeSourceFeed = '', + [string] $runtimeSourceFeedKey = '', + [switch] $excludePrereleaseVS, + [switch] $nativeToolsOnMachine, [switch] $help, [Parameter(ValueFromRemainingArguments=$true)][String[]]$properties ) -. $PSScriptRoot\tools.ps1 - +# Unset 'Platform' environment variable to avoid unwanted collision in InstallDotNetCore.targets file +# some computer has this env var defined (e.g. Some HP) +if($env:Platform) { + $env:Platform="" +} function Print-Usage() { - Write-Host "Common settings:" - Write-Host " -configuration Build configuration: 'Debug' or 'Release' (short: -c)" - Write-Host " -platform Platform configuration: 'x86', 'x64' or any valid Platform value to pass to msbuild" - Write-Host " -verbosity Msbuild verbosity: q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic] (short: -v)" - Write-Host " -binaryLog Output binary log (short: -bl)" - Write-Host " -help Print help and exit" - Write-Host "" - - Write-Host "Actions:" - Write-Host " -restore Restore dependencies (short: -r)" - Write-Host " -build Build solution (short: -b)" - Write-Host " -rebuild Rebuild solution" - Write-Host " -deploy Deploy built VSIXes" - Write-Host " -deployDeps Deploy dependencies (e.g. VSIXes for integration tests)" - Write-Host " -test Run all unit tests in the solution (short: -t)" - Write-Host " -integrationTest Run all integration tests in the solution" - Write-Host " -performanceTest Run all performance tests in the solution" - Write-Host " -pack Package build outputs into NuGet packages and Willow components" - Write-Host " -sign Sign build outputs" - Write-Host " -publish Publish artifacts (e.g. 
symbols)" - Write-Host "" - - Write-Host "Advanced settings:" - Write-Host " -projects Semi-colon delimited list of sln/proj's to build. Globbing is supported (*.sln)" - Write-Host " -ci Set when running on CI server" - Write-Host " -prepareMachine Prepare machine for CI run, clean up processes after build" - Write-Host " -warnAsError Sets warnaserror msbuild parameter ('true' or 'false')" - Write-Host " -msbuildEngine Msbuild engine to use to run build ('dotnet', 'vs', or unspecified)." - Write-Host "" - - Write-Host "Command line arguments not listed above are passed thru to msbuild." - Write-Host "The above arguments can be shortened as much as to be unambiguous (e.g. -co for configuration, -t for test, etc.)." + Write-Host "Common settings:" + Write-Host " -configuration Build configuration: 'Debug' or 'Release' (short: -c)" + Write-Host " -platform Platform configuration: 'x86', 'x64' or any valid Platform value to pass to msbuild" + Write-Host " -verbosity Msbuild verbosity: q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic] (short: -v)" + Write-Host " -binaryLog Output binary log (short: -bl)" + Write-Host " -help Print help and exit" + Write-Host "" + + Write-Host "Actions:" + Write-Host " -restore Restore dependencies (short: -r)" + Write-Host " -build Build solution (short: -b)" + Write-Host " -rebuild Rebuild solution" + Write-Host " -deploy Deploy built VSIXes" + Write-Host " -deployDeps Deploy dependencies (e.g. VSIXes for integration tests)" + Write-Host " -test Run all unit tests in the solution (short: -t)" + Write-Host " -integrationTest Run all integration tests in the solution" + Write-Host " -performanceTest Run all performance tests in the solution" + Write-Host " -pack Package build outputs into NuGet packages and Willow components" + Write-Host " -sign Sign build outputs" + Write-Host " -publish Publish artifacts (e.g. symbols)" + Write-Host " -clean Clean the solution" + Write-Host "" + + Write-Host "Advanced settings:" + Write-Host " -projects Semi-colon delimited list of sln/proj's to build. Globbing is supported (*.sln)" + Write-Host " -ci Set when running on CI server" + Write-Host " -excludeCIBinarylog Don't output binary log (short: -nobl)" + Write-Host " -prepareMachine Prepare machine for CI run, clean up processes after build" + Write-Host " -warnAsError Sets warnaserror msbuild parameter ('true' or 'false')" + Write-Host " -msbuildEngine Msbuild engine to use to run build ('dotnet', 'vs', or unspecified)." + Write-Host " -excludePrereleaseVS Set to exclude build engines in prerelease versions of Visual Studio" + Write-Host " -nativeToolsOnMachine Sets the native tools on machine environment variable (indicating that the script should use native tools on machine)" + Write-Host "" + + Write-Host "Command line arguments not listed above are passed thru to msbuild." + Write-Host "The above arguments can be shortened as much as to be unambiguous (e.g. -co for configuration, -t for test, etc.)." } +. $PSScriptRoot\tools.ps1 + function InitializeCustomToolset { if (-not $restore) { return } - $script = Join-Path $EngRoot "restore-toolset.ps1" + $script = Join-Path $EngRoot 'restore-toolset.ps1' if (Test-Path $script) { . 
$script @@ -78,8 +93,8 @@ function Build { $toolsetBuildProj = InitializeToolset InitializeCustomToolset - $bl = if ($binaryLog) { "/bl:" + (Join-Path $LogDir "Build.binlog") } else { "" } - $platformArg = if ($platform) { "/p:Platform=$platform" } else { "" } + $bl = if ($binaryLog) { '/bl:' + (Join-Path $LogDir 'Build.binlog') } else { '' } + $platformArg = if ($platform) { "/p:Platform=$platform" } else { '' } if ($projects) { # Re-assign properties to a new variable because PowerShell doesn't let us append properties directly for unclear reasons. @@ -113,24 +128,30 @@ function Build { } try { - if ($help -or (($null -ne $properties) -and ($properties.Contains("/help") -or $properties.Contains("/?")))) { + if ($clean) { + if (Test-Path $ArtifactsDir) { + Remove-Item -Recurse -Force $ArtifactsDir + Write-Host 'Artifacts directory deleted.' + } + exit 0 + } + + if ($help -or (($null -ne $properties) -and ($properties.Contains('/help') -or $properties.Contains('/?')))) { Print-Usage exit 0 } if ($ci) { - $binaryLog = $true + if (-not $excludeCIBinarylog) { + $binaryLog = $true + } $nodeReuse = $false } - # Import custom tools configuration, if present in the repo. - # Note: Import in global scope so that the script set top-level variables without qualification. - $configureToolsetScript = Join-Path $EngRoot "configure-toolset.ps1" - if (Test-Path $configureToolsetScript) { - . $configureToolsetScript + if ($nativeToolsOnMachine) { + $env:NativeToolsOnMachine = $true } - - if (($restore) -and ($null -eq $env:DisableNativeToolsetInstalls)) { + if ($restore) { InitializeNativeTools } @@ -138,7 +159,7 @@ try { } catch { Write-Host $_.ScriptStackTrace - Write-PipelineTelemetryError -Category "InitializeToolset" -Message $_ + Write-PipelineTelemetryError -Category 'InitializeToolset' -Message $_ ExitWithExitCode 1 } diff --git a/eng/common/build.sh b/eng/common/build.sh index 6236fc4d3..50af40cdd 100755 --- a/eng/common/build.sh +++ b/eng/common/build.sh @@ -19,6 +19,9 @@ usage() echo "Actions:" echo " --restore Restore dependencies (short: -r)" echo " --build Build solution (short: -b)" + echo " --sourceBuild Source-build the solution (short: -sb)" + echo " Will additionally trigger the following actions: --restore, --build, --pack" + echo " If --configuration is not set explicitly, will also set it to 'Release'" echo " --rebuild Rebuild solution" echo " --test Run all unit tests in the solution (short: -t)" echo " --integrationTest Run all integration tests in the solution" @@ -26,11 +29,13 @@ usage() echo " --pack Package build outputs into NuGet packages and Willow components" echo " --sign Sign build outputs" echo " --publish Publish artifacts (e.g. 
symbols)" + echo " --clean Clean the solution" echo "" echo "Advanced settings:" echo " --projects Project or solution file(s) to build" echo " --ci Set when running on CI server" + echo " --excludeCIBinarylog Don't output binary log (short: -nobl)" echo " --prepareMachine Prepare machine for CI run, clean up processes after build" echo " --nodeReuse Sets nodereuse msbuild parameter ('true' or 'false')" echo " --warnAsError Sets warnaserror msbuild parameter ('true' or 'false')" @@ -53,6 +58,7 @@ scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" restore=false build=false +source_build=false rebuild=false test=false integration_test=false @@ -62,26 +68,32 @@ publish=false sign=false public=false ci=false +clean=false warn_as_error=true node_reuse=true binary_log=false +exclude_ci_binary_log=false pipelines_log=false projects='' -configuration='Debug' +configuration='' prepare_machine=false verbosity='minimal' +runtime_source_feed='' +runtime_source_feed_key='' properties='' - while [[ $# > 0 ]]; do - opt="$(echo "${1/#--/-}" | awk '{print tolower($0)}')" + opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")" case "$opt" in -help|-h) usage exit 0 ;; + -clean) + clean=true + ;; -configuration|-c) configuration=$2 shift @@ -93,6 +105,9 @@ while [[ $# > 0 ]]; do -binarylog|-bl) binary_log=true ;; + -excludeCIBinarylog|-nobl) + exclude_ci_binary_log=true + ;; -pipelineslog|-pl) pipelines_log=true ;; @@ -108,6 +123,12 @@ while [[ $# > 0 ]]; do -pack) pack=true ;; + -sourcebuild|-sb) + build=true + source_build=true + restore=true + pack=true + ;; -test|-t) test=true ;; @@ -141,6 +162,14 @@ while [[ $# > 0 ]]; do node_reuse=$2 shift ;; + -runtimesourcefeed) + runtime_source_feed=$2 + shift + ;; + -runtimesourcefeedkey) + runtime_source_feed_key=$2 + shift + ;; *) properties="$properties $1" ;; @@ -149,10 +178,16 @@ while [[ $# > 0 ]]; do shift done +if [[ -z "$configuration" ]]; then + if [[ "$source_build" = true ]]; then configuration="Release"; else configuration="Debug"; fi +fi + if [[ "$ci" == true ]]; then pipelines_log=true - binary_log=true node_reuse=false + if [[ "$exclude_ci_binary_log" == false ]]; then + binary_log=true + fi fi . "$scriptroot/tools.sh" @@ -184,6 +219,7 @@ function Build { /p:RepoRoot="$repo_root" \ /p:Restore=$restore \ /p:Build=$build \ + /p:ArcadeBuildFromSource=$source_build \ /p:Rebuild=$rebuild \ /p:Test=$test \ /p:Pack=$pack \ @@ -196,20 +232,15 @@ function Build { ExitWithExitCode 0 } -# Import custom tools configuration, if present in the repo. -configure_toolset_script="$eng_root/configure-toolset.sh" -if [[ -a "$configure_toolset_script" ]]; then - . "$configure_toolset_script" -fi - -# TODO: https://github.com/dotnet/arcade/issues/1468 -# Temporary workaround to avoid breaking change. -# Remove once repos are updated. -if [[ -n "${useInstalledDotNetCli:-}" ]]; then - use_installed_dotnet_cli="$useInstalledDotNetCli" +if [[ "$clean" == true ]]; then + if [ -d "$artifacts_dir" ]; then + rm -rf $artifacts_dir + echo "Artifacts directory deleted." 
+ fi + exit 0 fi -if [[ "$restore" == true && -z ${DisableNativeToolsetInstalls:-} ]]; then +if [[ "$restore" == true ]]; then InitializeNativeTools fi diff --git a/eng/common/cross/android/arm/toolchain.cmake b/eng/common/cross/android/arm/toolchain.cmake deleted file mode 100644 index a7e1c7350..000000000 --- a/eng/common/cross/android/arm/toolchain.cmake +++ /dev/null @@ -1,41 +0,0 @@ -set(CROSS_NDK_TOOLCHAIN $ENV{ROOTFS_DIR}/../) -set(CROSS_ROOTFS ${CROSS_NDK_TOOLCHAIN}/sysroot) -set(CLR_CMAKE_PLATFORM_ANDROID "Android") - -set(CMAKE_SYSTEM_NAME Linux) -set(CMAKE_SYSTEM_VERSION 1) -set(CMAKE_SYSTEM_PROCESSOR arm) - -## Specify the toolchain -set(TOOLCHAIN "arm-linux-androideabi") -set(CMAKE_PREFIX_PATH ${CROSS_NDK_TOOLCHAIN}) -set(TOOLCHAIN_PREFIX ${TOOLCHAIN}-) - -find_program(CMAKE_C_COMPILER ${TOOLCHAIN_PREFIX}clang) -find_program(CMAKE_CXX_COMPILER ${TOOLCHAIN_PREFIX}clang++) -find_program(CMAKE_ASM_COMPILER ${TOOLCHAIN_PREFIX}clang) -find_program(CMAKE_AR ${TOOLCHAIN_PREFIX}ar) -find_program(CMAKE_LD ${TOOLCHAIN_PREFIX}ar) -find_program(CMAKE_OBJCOPY ${TOOLCHAIN_PREFIX}objcopy) -find_program(CMAKE_OBJDUMP ${TOOLCHAIN_PREFIX}objdump) - -add_compile_options(--sysroot=${CROSS_ROOTFS}) -add_compile_options(-fPIE) -add_compile_options(-mfloat-abi=soft) -include_directories(SYSTEM ${CROSS_NDK_TOOLCHAIN}/include/c++/4.9.x/) -include_directories(SYSTEM ${CROSS_NDK_TOOLCHAIN}/include/c++/4.9.x/arm-linux-androideabi/) - -set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} -B ${CROSS_ROOTFS}/usr/lib/gcc/${TOOLCHAIN}") -set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} -L${CROSS_ROOTFS}/lib/${TOOLCHAIN}") -set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} --sysroot=${CROSS_ROOTFS}") -set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} -fPIE -pie") - -set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${CROSS_LINK_FLAGS}" CACHE STRING "" FORCE) -set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} ${CROSS_LINK_FLAGS}" CACHE STRING "" FORCE) -set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} ${CROSS_LINK_FLAGS}" CACHE STRING "" FORCE) - -set(CMAKE_FIND_ROOT_PATH "${CROSS_ROOTFS}") -set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER) -set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY) -set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY) -set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY) diff --git a/eng/common/cross/android/arm64/toolchain.cmake b/eng/common/cross/android/arm64/toolchain.cmake deleted file mode 100644 index 29415899c..000000000 --- a/eng/common/cross/android/arm64/toolchain.cmake +++ /dev/null @@ -1,42 +0,0 @@ -set(CROSS_NDK_TOOLCHAIN $ENV{ROOTFS_DIR}/../) -set(CROSS_ROOTFS ${CROSS_NDK_TOOLCHAIN}/sysroot) -set(CLR_CMAKE_PLATFORM_ANDROID "Android") - -set(CMAKE_SYSTEM_NAME Linux) -set(CMAKE_SYSTEM_VERSION 1) -set(CMAKE_SYSTEM_PROCESSOR aarch64) - -## Specify the toolchain -set(TOOLCHAIN "aarch64-linux-android") -set(CMAKE_PREFIX_PATH ${CROSS_NDK_TOOLCHAIN}) -set(TOOLCHAIN_PREFIX ${TOOLCHAIN}-) - -find_program(CMAKE_C_COMPILER ${TOOLCHAIN_PREFIX}clang) -find_program(CMAKE_CXX_COMPILER ${TOOLCHAIN_PREFIX}clang++) -find_program(CMAKE_ASM_COMPILER ${TOOLCHAIN_PREFIX}clang) -find_program(CMAKE_AR ${TOOLCHAIN_PREFIX}ar) -find_program(CMAKE_LD ${TOOLCHAIN_PREFIX}ar) -find_program(CMAKE_OBJCOPY ${TOOLCHAIN_PREFIX}objcopy) -find_program(CMAKE_OBJDUMP ${TOOLCHAIN_PREFIX}objdump) - -add_compile_options(--sysroot=${CROSS_ROOTFS}) -add_compile_options(-fPIE) - -## Needed for Android or bionic specific conditionals -add_compile_options(-D__ANDROID__) -add_compile_options(-D__BIONIC__) - -set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} -B 
${CROSS_ROOTFS}/usr/lib/gcc/${TOOLCHAIN}") -set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} -L${CROSS_ROOTFS}/lib/${TOOLCHAIN}") -set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} --sysroot=${CROSS_ROOTFS}") -set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} -fPIE -pie") - -set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${CROSS_LINK_FLAGS}" CACHE STRING "" FORCE) -set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} ${CROSS_LINK_FLAGS}" CACHE STRING "" FORCE) -set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} ${CROSS_LINK_FLAGS}" CACHE STRING "" FORCE) - -set(CMAKE_FIND_ROOT_PATH "${CROSS_ROOTFS}") -set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER) -set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY) -set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY) -set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY) diff --git a/eng/common/cross/arm/sources.list.focal b/eng/common/cross/arm/sources.list.focal new file mode 100644 index 000000000..4de2600c1 --- /dev/null +++ b/eng/common/cross/arm/sources.list.focal @@ -0,0 +1,11 @@ +deb http://ports.ubuntu.com/ubuntu-ports/ focal main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ focal main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ focal-updates main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ focal-updates main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ focal-backports main restricted +deb-src http://ports.ubuntu.com/ubuntu-ports/ focal-backports main restricted + +deb http://ports.ubuntu.com/ubuntu-ports/ focal-security main restricted universe multiverse +deb-src http://ports.ubuntu.com/ubuntu-ports/ focal-security main restricted universe multiverse diff --git a/eng/common/cross/arm/sources.list.jammy b/eng/common/cross/arm/sources.list.jammy new file mode 100644 index 000000000..6bb045302 --- /dev/null +++ b/eng/common/cross/arm/sources.list.jammy @@ -0,0 +1,11 @@ +deb http://ports.ubuntu.com/ubuntu-ports/ jammy main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ jammy-updates main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy-updates main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ jammy-backports main restricted +deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy-backports main restricted + +deb http://ports.ubuntu.com/ubuntu-ports/ jammy-security main restricted universe multiverse +deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy-security main restricted universe multiverse diff --git a/eng/common/cross/arm/sources.list.trusty b/eng/common/cross/arm/sources.list.trusty deleted file mode 100644 index 07d8f88d8..000000000 --- a/eng/common/cross/arm/sources.list.trusty +++ /dev/null @@ -1,11 +0,0 @@ -deb http://ports.ubuntu.com/ubuntu-ports/ trusty main restricted universe -deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty main restricted universe - -deb http://ports.ubuntu.com/ubuntu-ports/ trusty-updates main restricted universe -deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-updates main restricted universe - -deb http://ports.ubuntu.com/ubuntu-ports/ trusty-backports main restricted -deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-backports main restricted - -deb http://ports.ubuntu.com/ubuntu-ports/ trusty-security main restricted universe multiverse -deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-security main restricted universe multiverse \ No newline at end of file diff --git 
a/eng/common/cross/arm/sources.list.xenial b/eng/common/cross/arm/sources.list.xenial index eacd86b7d..56fbb36a5 100644 --- a/eng/common/cross/arm/sources.list.xenial +++ b/eng/common/cross/arm/sources.list.xenial @@ -8,4 +8,4 @@ deb http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted deb http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse -deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse \ No newline at end of file +deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse diff --git a/eng/common/cross/arm/tizen/tizen.patch b/eng/common/cross/arm/tizen/tizen.patch new file mode 100644 index 000000000..fb12ade72 --- /dev/null +++ b/eng/common/cross/arm/tizen/tizen.patch @@ -0,0 +1,9 @@ +diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so +--- a/usr/lib/libc.so 2016-12-30 23:00:08.284951863 +0900 ++++ b/usr/lib/libc.so 2016-12-30 23:00:32.140951815 +0900 +@@ -2,4 +2,4 @@ + Use the shared library, but some functions are only in + the static library, so try that secondarily. */ + OUTPUT_FORMAT(elf32-littlearm) +-GROUP ( /lib/libc.so.6 /usr/lib/libc_nonshared.a AS_NEEDED ( /lib/ld-linux-armhf.so.3 ) ) ++GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux-armhf.so.3 ) ) diff --git a/eng/common/cross/arm/trusty-lttng-2.4.patch b/eng/common/cross/arm/trusty-lttng-2.4.patch deleted file mode 100644 index 8e4dd7ae7..000000000 --- a/eng/common/cross/arm/trusty-lttng-2.4.patch +++ /dev/null @@ -1,71 +0,0 @@ -From e72c9d7ead60e3317bd6d1fade995c07021c947b Mon Sep 17 00:00:00 2001 -From: Mathieu Desnoyers -Date: Thu, 7 May 2015 13:25:04 -0400 -Subject: [PATCH] Fix: building probe providers with C++ compiler - -Robert Daniels wrote: -> > I'm attempting to use lttng userspace tracing with a C++ application -> > on an ARM platform. I'm using GCC 4.8.4 on Linux 3.14 with the 2.6 -> > release of lttng. I've compiled lttng-modules, lttng-ust, and -> > lttng-tools and have been able to get a simple test working with C -> > code. When I attempt to run the hello.cxx test on my target it will -> > segfault. -> -> -> I spent a little time digging into this issue and finally discovered the -> cause of my segfault with ARM C++ tracepoints. -> -> There is a struct called 'lttng_event' in ust-events.h which contains an -> empty union 'u'. This was the cause of my issue. Under C, this empty union -> compiles to a zero byte member while under C++ it compiles to a one byte -> member, and in my case was four-byte aligned which caused my C++ code to -> have the 'cds_list_head node' offset incorrectly by four bytes. This lead -> to an incorrect linked list structure which caused my issue. -> -> Since this union is empty, I simply removed it from the struct and everything -> worked correctly. -> -> I don't know the history or purpose behind this empty union so I'd like to -> know if this is a safe fix. If it is I can submit a patch with the union -> removed. - -That's a very nice catch! - -We do not support building tracepoint probe provider with -g++ yet, as stated in lttng-ust(3): - -"- Note for C++ support: although an application instrumented with - tracepoints can be compiled with g++, tracepoint probes should be - compiled with gcc (only tested with gcc so far)." 
- -However, if it works fine with this fix, then I'm tempted to take it, -especially because removing the empty union does not appear to affect -the layout of struct lttng_event as seen from liblttng-ust, which must -be compiled with a C compiler, and from probe providers compiled with -a C compiler. So all we are changing is the layout of a probe provider -compiled with a C++ compiler, which is anyway buggy at the moment, -because it is not compatible with the layout expected by liblttng-ust -compiled with a C compiler. - -Reported-by: Robert Daniels -Signed-off-by: Mathieu Desnoyers ---- - include/lttng/ust-events.h | 2 -- - 1 file changed, 2 deletions(-) - -diff --git a/usr/include/lttng/ust-events.h b/usr/include/lttng/ust-events.h -index 328a875..3d7a274 100644 ---- a/usr/include/lttng/ust-events.h -+++ b/usr/include/lttng/ust-events.h -@@ -407,8 +407,6 @@ struct lttng_event { - void *_deprecated1; - struct lttng_ctx *ctx; - enum lttng_ust_instrumentation instrumentation; -- union { -- } u; - struct cds_list_head node; /* Event list in session */ - struct cds_list_head _deprecated2; - void *_deprecated3; --- -2.7.4 - diff --git a/eng/common/cross/arm/trusty.patch b/eng/common/cross/arm/trusty.patch deleted file mode 100644 index 2f2972f8e..000000000 --- a/eng/common/cross/arm/trusty.patch +++ /dev/null @@ -1,97 +0,0 @@ -diff -u -r a/usr/include/urcu/uatomic/generic.h b/usr/include/urcu/uatomic/generic.h ---- a/usr/include/urcu/uatomic/generic.h 2014-03-28 06:04:42.000000000 +0900 -+++ b/usr/include/urcu/uatomic/generic.h 2017-02-13 10:35:21.189927116 +0900 -@@ -65,17 +65,17 @@ - switch (len) { - #ifdef UATOMIC_HAS_ATOMIC_BYTE - case 1: -- return __sync_val_compare_and_swap_1(addr, old, _new); -+ return __sync_val_compare_and_swap_1((uint8_t *) addr, old, _new); - #endif - #ifdef UATOMIC_HAS_ATOMIC_SHORT - case 2: -- return __sync_val_compare_and_swap_2(addr, old, _new); -+ return __sync_val_compare_and_swap_2((uint16_t *) addr, old, _new); - #endif - case 4: -- return __sync_val_compare_and_swap_4(addr, old, _new); -+ return __sync_val_compare_and_swap_4((uint32_t *) addr, old, _new); - #if (CAA_BITS_PER_LONG == 64) - case 8: -- return __sync_val_compare_and_swap_8(addr, old, _new); -+ return __sync_val_compare_and_swap_8((uint64_t *) addr, old, _new); - #endif - } - _uatomic_link_error(); -@@ -100,20 +100,20 @@ - switch (len) { - #ifdef UATOMIC_HAS_ATOMIC_BYTE - case 1: -- __sync_and_and_fetch_1(addr, val); -+ __sync_and_and_fetch_1((uint8_t *) addr, val); - return; - #endif - #ifdef UATOMIC_HAS_ATOMIC_SHORT - case 2: -- __sync_and_and_fetch_2(addr, val); -+ __sync_and_and_fetch_2((uint16_t *) addr, val); - return; - #endif - case 4: -- __sync_and_and_fetch_4(addr, val); -+ __sync_and_and_fetch_4((uint32_t *) addr, val); - return; - #if (CAA_BITS_PER_LONG == 64) - case 8: -- __sync_and_and_fetch_8(addr, val); -+ __sync_and_and_fetch_8((uint64_t *) addr, val); - return; - #endif - } -@@ -139,20 +139,20 @@ - switch (len) { - #ifdef UATOMIC_HAS_ATOMIC_BYTE - case 1: -- __sync_or_and_fetch_1(addr, val); -+ __sync_or_and_fetch_1((uint8_t *) addr, val); - return; - #endif - #ifdef UATOMIC_HAS_ATOMIC_SHORT - case 2: -- __sync_or_and_fetch_2(addr, val); -+ __sync_or_and_fetch_2((uint16_t *) addr, val); - return; - #endif - case 4: -- __sync_or_and_fetch_4(addr, val); -+ __sync_or_and_fetch_4((uint32_t *) addr, val); - return; - #if (CAA_BITS_PER_LONG == 64) - case 8: -- __sync_or_and_fetch_8(addr, val); -+ __sync_or_and_fetch_8((uint64_t *) addr, val); - return; - #endif - } -@@ -180,17 
+180,17 @@ - switch (len) { - #ifdef UATOMIC_HAS_ATOMIC_BYTE - case 1: -- return __sync_add_and_fetch_1(addr, val); -+ return __sync_add_and_fetch_1((uint8_t *) addr, val); - #endif - #ifdef UATOMIC_HAS_ATOMIC_SHORT - case 2: -- return __sync_add_and_fetch_2(addr, val); -+ return __sync_add_and_fetch_2((uint16_t *) addr, val); - #endif - case 4: -- return __sync_add_and_fetch_4(addr, val); -+ return __sync_add_and_fetch_4((uint32_t *) addr, val); - #if (CAA_BITS_PER_LONG == 64) - case 8: -- return __sync_add_and_fetch_8(addr, val); -+ return __sync_add_and_fetch_8((uint64_t *) addr, val); - #endif - } - _uatomic_link_error(); diff --git a/eng/common/cross/arm64/sources.list.focal b/eng/common/cross/arm64/sources.list.focal new file mode 100644 index 000000000..4de2600c1 --- /dev/null +++ b/eng/common/cross/arm64/sources.list.focal @@ -0,0 +1,11 @@ +deb http://ports.ubuntu.com/ubuntu-ports/ focal main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ focal main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ focal-updates main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ focal-updates main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ focal-backports main restricted +deb-src http://ports.ubuntu.com/ubuntu-ports/ focal-backports main restricted + +deb http://ports.ubuntu.com/ubuntu-ports/ focal-security main restricted universe multiverse +deb-src http://ports.ubuntu.com/ubuntu-ports/ focal-security main restricted universe multiverse diff --git a/eng/common/cross/arm64/sources.list.jammy b/eng/common/cross/arm64/sources.list.jammy new file mode 100644 index 000000000..6bb045302 --- /dev/null +++ b/eng/common/cross/arm64/sources.list.jammy @@ -0,0 +1,11 @@ +deb http://ports.ubuntu.com/ubuntu-ports/ jammy main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ jammy-updates main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy-updates main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ jammy-backports main restricted +deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy-backports main restricted + +deb http://ports.ubuntu.com/ubuntu-ports/ jammy-security main restricted universe multiverse +deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy-security main restricted universe multiverse diff --git a/eng/common/cross/arm64/sources.list.trusty b/eng/common/cross/arm64/sources.list.trusty deleted file mode 100644 index 07d8f88d8..000000000 --- a/eng/common/cross/arm64/sources.list.trusty +++ /dev/null @@ -1,11 +0,0 @@ -deb http://ports.ubuntu.com/ubuntu-ports/ trusty main restricted universe -deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty main restricted universe - -deb http://ports.ubuntu.com/ubuntu-ports/ trusty-updates main restricted universe -deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-updates main restricted universe - -deb http://ports.ubuntu.com/ubuntu-ports/ trusty-backports main restricted -deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-backports main restricted - -deb http://ports.ubuntu.com/ubuntu-ports/ trusty-security main restricted universe multiverse -deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-security main restricted universe multiverse \ No newline at end of file diff --git a/eng/common/cross/arm64/sources.list.xenial b/eng/common/cross/arm64/sources.list.xenial index eacd86b7d..56fbb36a5 100644 --- a/eng/common/cross/arm64/sources.list.xenial 
+++ b/eng/common/cross/arm64/sources.list.xenial @@ -8,4 +8,4 @@ deb http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted deb http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse -deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse \ No newline at end of file +deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse diff --git a/eng/common/cross/arm64/tizen/tizen.patch b/eng/common/cross/arm64/tizen/tizen.patch new file mode 100644 index 000000000..af7c8be05 --- /dev/null +++ b/eng/common/cross/arm64/tizen/tizen.patch @@ -0,0 +1,9 @@ +diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so +--- a/usr/lib64/libc.so 2016-12-30 23:00:08.284951863 +0900 ++++ b/usr/lib64/libc.so 2016-12-30 23:00:32.140951815 +0900 +@@ -2,4 +2,4 @@ + Use the shared library, but some functions are only in + the static library, so try that secondarily. */ + OUTPUT_FORMAT(elf64-littleaarch64) +-GROUP ( /lib64/libc.so.6 /usr/lib64/libc_nonshared.a AS_NEEDED ( /lib/ld-linux-aarch64.so.1 ) ) ++GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux-aarch64.so.1 ) ) diff --git a/eng/common/cross/armel/armel.jessie.patch b/eng/common/cross/armel/armel.jessie.patch new file mode 100644 index 000000000..2d2615619 --- /dev/null +++ b/eng/common/cross/armel/armel.jessie.patch @@ -0,0 +1,43 @@ +diff -u -r a/usr/include/urcu/uatomic/generic.h b/usr/include/urcu/uatomic/generic.h +--- a/usr/include/urcu/uatomic/generic.h 2014-10-22 15:00:58.000000000 -0700 ++++ b/usr/include/urcu/uatomic/generic.h 2020-10-30 21:38:28.550000000 -0700 +@@ -69,10 +69,10 @@ + #endif + #ifdef UATOMIC_HAS_ATOMIC_SHORT + case 2: +- return __sync_val_compare_and_swap_2(addr, old, _new); ++ return __sync_val_compare_and_swap_2((uint16_t*) addr, old, _new); + #endif + case 4: +- return __sync_val_compare_and_swap_4(addr, old, _new); ++ return __sync_val_compare_and_swap_4((uint32_t*) addr, old, _new); + #if (CAA_BITS_PER_LONG == 64) + case 8: + return __sync_val_compare_and_swap_8(addr, old, _new); +@@ -109,7 +109,7 @@ + return; + #endif + case 4: +- __sync_and_and_fetch_4(addr, val); ++ __sync_and_and_fetch_4((uint32_t*) addr, val); + return; + #if (CAA_BITS_PER_LONG == 64) + case 8: +@@ -148,7 +148,7 @@ + return; + #endif + case 4: +- __sync_or_and_fetch_4(addr, val); ++ __sync_or_and_fetch_4((uint32_t*) addr, val); + return; + #if (CAA_BITS_PER_LONG == 64) + case 8: +@@ -187,7 +187,7 @@ + return __sync_add_and_fetch_2(addr, val); + #endif + case 4: +- return __sync_add_and_fetch_4(addr, val); ++ return __sync_add_and_fetch_4((uint32_t*) addr, val); + #if (CAA_BITS_PER_LONG == 64) + case 8: + return __sync_add_and_fetch_8(addr, val); diff --git a/eng/common/cross/armel/tizen-fetch.sh b/eng/common/cross/armel/tizen-fetch.sh deleted file mode 100755 index ed70e0a86..000000000 --- a/eng/common/cross/armel/tizen-fetch.sh +++ /dev/null @@ -1,171 +0,0 @@ -#!/usr/bin/env bash -set -e - -if [[ -z "${VERBOSE// }" ]] || [ "$VERBOSE" -ne "$VERBOSE" ] 2>/dev/null; then - VERBOSE=0 -fi - -Log() -{ - if [ $VERBOSE -ge $1 ]; then - echo ${@:2} - fi -} - -Inform() -{ - Log 1 -e "\x1B[0;34m$@\x1B[m" -} - -Debug() -{ - Log 2 -e "\x1B[0;32m$@\x1B[m" -} - -Error() -{ - >&2 Log 0 -e "\x1B[0;31m$@\x1B[m" -} - -Fetch() -{ - URL=$1 - FILE=$2 - PROGRESS=$3 - if [ $VERBOSE -ge 1 ] && [ $PROGRESS ]; then - CURL_OPT="--progress-bar" - else - CURL_OPT="--silent" - fi - 
curl $CURL_OPT $URL > $FILE -} - -hash curl 2> /dev/null || { Error "Require 'curl' Aborting."; exit 1; } -hash xmllint 2> /dev/null || { Error "Require 'xmllint' Aborting."; exit 1; } -hash sha256sum 2> /dev/null || { Error "Require 'sha256sum' Aborting."; exit 1; } - -TMPDIR=$1 -if [ ! -d $TMPDIR ]; then - TMPDIR=./tizen_tmp - Debug "Create temporary directory : $TMPDIR" - mkdir -p $TMPDIR -fi - -TIZEN_URL=http://download.tizen.org/releases/milestone/tizen -BUILD_XML=build.xml -REPOMD_XML=repomd.xml -PRIMARY_XML=primary.xml -TARGET_URL="http://__not_initialized" - -Xpath_get() -{ - XPATH_RESULT='' - XPATH=$1 - XML_FILE=$2 - RESULT=$(xmllint --xpath $XPATH $XML_FILE) - if [[ -z ${RESULT// } ]]; then - Error "Can not find target from $XML_FILE" - Debug "Xpath = $XPATH" - exit 1 - fi - XPATH_RESULT=$RESULT -} - -fetch_tizen_pkgs_init() -{ - TARGET=$1 - PROFILE=$2 - Debug "Initialize TARGET=$TARGET, PROFILE=$PROFILE" - - TMP_PKG_DIR=$TMPDIR/tizen_${PROFILE}_pkgs - if [ -d $TMP_PKG_DIR ]; then rm -rf $TMP_PKG_DIR; fi - mkdir -p $TMP_PKG_DIR - - PKG_URL=$TIZEN_URL/$PROFILE/latest - - BUILD_XML_URL=$PKG_URL/$BUILD_XML - TMP_BUILD=$TMP_PKG_DIR/$BUILD_XML - TMP_REPOMD=$TMP_PKG_DIR/$REPOMD_XML - TMP_PRIMARY=$TMP_PKG_DIR/$PRIMARY_XML - TMP_PRIMARYGZ=${TMP_PRIMARY}.gz - - Fetch $BUILD_XML_URL $TMP_BUILD - - Debug "fetch $BUILD_XML_URL to $TMP_BUILD" - - TARGET_XPATH="//build/buildtargets/buildtarget[@name=\"$TARGET\"]/repo[@type=\"binary\"]/text()" - Xpath_get $TARGET_XPATH $TMP_BUILD - TARGET_PATH=$XPATH_RESULT - TARGET_URL=$PKG_URL/$TARGET_PATH - - REPOMD_URL=$TARGET_URL/repodata/repomd.xml - PRIMARY_XPATH='string(//*[local-name()="data"][@type="primary"]/*[local-name()="location"]/@href)' - - Fetch $REPOMD_URL $TMP_REPOMD - - Debug "fetch $REPOMD_URL to $TMP_REPOMD" - - Xpath_get $PRIMARY_XPATH $TMP_REPOMD - PRIMARY_XML_PATH=$XPATH_RESULT - PRIMARY_URL=$TARGET_URL/$PRIMARY_XML_PATH - - Fetch $PRIMARY_URL $TMP_PRIMARYGZ - - Debug "fetch $PRIMARY_URL to $TMP_PRIMARYGZ" - - gunzip $TMP_PRIMARYGZ - - Debug "unzip $TMP_PRIMARYGZ to $TMP_PRIMARY" -} - -fetch_tizen_pkgs() -{ - ARCH=$1 - PACKAGE_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="location"]/@href)' - - PACKAGE_CHECKSUM_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="checksum"]/text())' - - for pkg in ${@:2} - do - Inform "Fetching... $pkg" - XPATH=${PACKAGE_XPATH_TPL/_PKG_/$pkg} - XPATH=${XPATH/_ARCH_/$ARCH} - Xpath_get $XPATH $TMP_PRIMARY - PKG_PATH=$XPATH_RESULT - - XPATH=${PACKAGE_CHECKSUM_XPATH_TPL/_PKG_/$pkg} - XPATH=${XPATH/_ARCH_/$ARCH} - Xpath_get $XPATH $TMP_PRIMARY - CHECKSUM=$XPATH_RESULT - - PKG_URL=$TARGET_URL/$PKG_PATH - PKG_FILE=$(basename $PKG_PATH) - PKG_PATH=$TMPDIR/$PKG_FILE - - Debug "Download $PKG_URL to $PKG_PATH" - Fetch $PKG_URL $PKG_PATH true - - echo "$CHECKSUM $PKG_PATH" | sha256sum -c - > /dev/null - if [ $? 
-ne 0 ]; then - Error "Fail to fetch $PKG_URL to $PKG_PATH" - Debug "Checksum = $CHECKSUM" - exit 1 - fi - done -} - -Inform "Initialize arm base" -fetch_tizen_pkgs_init standard base -Inform "fetch common packages" -fetch_tizen_pkgs armv7l gcc glibc glibc-devel libicu libicu-devel libatomic -fetch_tizen_pkgs noarch linux-glibc-devel -Inform "fetch coreclr packages" -fetch_tizen_pkgs armv7l lldb lldb-devel libgcc libstdc++ libstdc++-devel libunwind libunwind-devel lttng-ust-devel lttng-ust userspace-rcu-devel userspace-rcu -Inform "fetch corefx packages" -fetch_tizen_pkgs armv7l libcom_err libcom_err-devel zlib zlib-devel libopenssl libopenssl-devel krb5 krb5-devel libcurl libcurl-devel - -Inform "Initialize standard unified" -fetch_tizen_pkgs_init standard unified -Inform "fetch corefx packages" -fetch_tizen_pkgs armv7l gssdp gssdp-devel tizen-release - diff --git a/eng/common/cross/armel/tizen/tizen-dotnet.ks b/eng/common/cross/armel/tizen/tizen-dotnet.ks deleted file mode 100644 index 506d455bd..000000000 --- a/eng/common/cross/armel/tizen/tizen-dotnet.ks +++ /dev/null @@ -1,50 +0,0 @@ -lang en_US.UTF-8 -keyboard us -timezone --utc Asia/Seoul - -part / --fstype="ext4" --size=3500 --ondisk=mmcblk0 --label rootfs --fsoptions=defaults,noatime - -rootpw tizen -desktop --autologinuser=root -user --name root --groups audio,video --password 'tizen' - -repo --name=standard --baseurl=http://download.tizen.org/releases/milestone/tizen/unified/latest/repos/standard/packages/ --ssl_verify=no -repo --name=base --baseurl=http://download.tizen.org/releases/milestone/tizen/base/latest/repos/standard/packages/ --ssl_verify=no - -%packages -tar -gzip - -sed -grep -gawk -perl - -binutils -findutils -util-linux -lttng-ust -userspace-rcu -procps-ng -tzdata -ca-certificates - - -### Core FX -libicu -libunwind -iputils -zlib -krb5 -libcurl -libopenssl - -%end - -%post - -### Update /tmp privilege -chmod 777 /tmp -#################################### - -%end diff --git a/eng/common/cross/armel/tizen/tizen.patch b/eng/common/cross/armel/tizen/tizen.patch index d223427c9..ca7c7c1ff 100644 --- a/eng/common/cross/armel/tizen/tizen.patch +++ b/eng/common/cross/armel/tizen/tizen.patch @@ -7,12 +7,3 @@ diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so OUTPUT_FORMAT(elf32-littlearm) -GROUP ( /lib/libc.so.6 /usr/lib/libc_nonshared.a AS_NEEDED ( /lib/ld-linux.so.3 ) ) +GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux.so.3 ) ) -diff -u -r a/usr/lib/libpthread.so b/usr/lib/libpthread.so ---- a/usr/lib/libpthread.so 2016-12-30 23:00:19.408951841 +0900 -+++ b/usr/lib/libpthread.so 2016-12-30 23:00:39.068951801 +0900 -@@ -2,4 +2,4 @@ - Use the shared library, but some functions are only in - the static library, so try that secondarily. 
*/ - OUTPUT_FORMAT(elf32-littlearm) --GROUP ( /lib/libpthread.so.0 /usr/lib/libpthread_nonshared.a ) -+GROUP ( libpthread.so.0 libpthread_nonshared.a ) diff --git a/eng/common/cross/armv6/sources.list.buster b/eng/common/cross/armv6/sources.list.buster new file mode 100644 index 000000000..f27fc4fb3 --- /dev/null +++ b/eng/common/cross/armv6/sources.list.buster @@ -0,0 +1,2 @@ +deb http://raspbian.raspberrypi.org/raspbian/ buster main contrib non-free rpi +deb-src http://raspbian.raspberrypi.org/raspbian/ buster main contrib non-free rpi diff --git a/eng/common/cross/build-android-rootfs.sh b/eng/common/cross/build-android-rootfs.sh index adceda877..f163fb9da 100755 --- a/eng/common/cross/build-android-rootfs.sh +++ b/eng/common/cross/build-android-rootfs.sh @@ -1,6 +1,6 @@ #!/usr/bin/env bash set -e -__NDK_Version=r14 +__NDK_Version=r21 usage() { @@ -16,18 +16,18 @@ usage() echo. echo "By default, the NDK will be downloaded into the cross/android-rootfs/android-ndk-$__NDK_Version directory. If you already have an NDK installation," echo "you can set the NDK_DIR environment variable to have this script use that installation of the NDK." - echo "By default, this script will generate a file, android_platform, in the root of the ROOTFS_DIR directory that contains the RID for the supported and tested Android build: android.21-arm64. This file is to replace '/etc/os-release', which is not available for Android." + echo "By default, this script will generate a file, android_platform, in the root of the ROOTFS_DIR directory that contains the RID for the supported and tested Android build: android.28-arm64. This file is to replace '/etc/os-release', which is not available for Android." exit 1 } -__ApiLevel=21 # The minimum platform for arm64 is API level 21 +__ApiLevel=28 # The minimum platform for arm64 is API level 21 but the minimum version that support glob(3) is 28. See $ANDROID_NDK/toolchains/llvm/prebuilt/linux-x86_64/sysroot/usr/include/glob.h __BuildArch=arm64 __AndroidArch=aarch64 __AndroidToolchain=aarch64-linux-android for i in "$@" do - lowerI="$(echo $i | awk '{print tolower($0)}')" + lowerI="$(echo $i | tr "[:upper:]" "[:lower:]")" case $lowerI in -?|-h|--help) usage @@ -53,13 +53,20 @@ for i in "$@" done # Obtain the location of the bash script to figure out where the root of the repo is. -__CrossDir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +__ScriptBaseDir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" -__Android_Cross_Dir="$__CrossDir/android-rootfs" -__NDK_Dir="$__Android_Cross_Dir/android-ndk-$__NDK_Version" -__libunwind_Dir="$__Android_Cross_Dir/libunwind" -__lldb_Dir="$__Android_Cross_Dir/lldb" -__ToolchainDir="$__Android_Cross_Dir/toolchain/$__BuildArch" +__CrossDir="$__ScriptBaseDir/../../../.tools/android-rootfs" + +if [[ ! -f "$__CrossDir" ]]; then + mkdir -p "$__CrossDir" +fi + +# Resolve absolute path to avoid `../` in build logs +__CrossDir="$( cd "$__CrossDir" && pwd )" + +__NDK_Dir="$__CrossDir/android-ndk-$__NDK_Version" +__lldb_Dir="$__CrossDir/lldb" +__ToolchainDir="$__CrossDir/android-ndk-$__NDK_Version" if [[ -n "$TOOLCHAIN_DIR" ]]; then __ToolchainDir=$TOOLCHAIN_DIR @@ -78,60 +85,47 @@ echo "Target Toolchain location: $__ToolchainDir" if [ ! 
-d $__NDK_Dir ]; then echo Downloading the NDK into $__NDK_Dir mkdir -p $__NDK_Dir - wget -nv -nc --show-progress https://dl.google.com/android/repository/android-ndk-$__NDK_Version-linux-x86_64.zip -O $__Android_Cross_Dir/android-ndk-$__NDK_Version-linux-x86_64.zip - unzip -q $__Android_Cross_Dir/android-ndk-$__NDK_Version-linux-x86_64.zip -d $__Android_Cross_Dir + wget -q --progress=bar:force:noscroll --show-progress https://dl.google.com/android/repository/android-ndk-$__NDK_Version-linux-x86_64.zip -O $__CrossDir/android-ndk-$__NDK_Version-linux-x86_64.zip + unzip -q $__CrossDir/android-ndk-$__NDK_Version-linux-x86_64.zip -d $__CrossDir fi if [ ! -d $__lldb_Dir ]; then mkdir -p $__lldb_Dir echo Downloading LLDB into $__lldb_Dir - wget -nv -nc --show-progress https://dl.google.com/android/repository/lldb-2.3.3614996-linux-x86_64.zip -O $__Android_Cross_Dir/lldb-2.3.3614996-linux-x86_64.zip - unzip -q $__Android_Cross_Dir/lldb-2.3.3614996-linux-x86_64.zip -d $__lldb_Dir + wget -q --progress=bar:force:noscroll --show-progress https://dl.google.com/android/repository/lldb-2.3.3614996-linux-x86_64.zip -O $__CrossDir/lldb-2.3.3614996-linux-x86_64.zip + unzip -q $__CrossDir/lldb-2.3.3614996-linux-x86_64.zip -d $__lldb_Dir fi -# Create the RootFS for both arm64 as well as aarch -rm -rf $__Android_Cross_Dir/toolchain - -echo Generating the $__BuildArch toolchain -$__NDK_Dir/build/tools/make_standalone_toolchain.py --arch $__BuildArch --api $__ApiLevel --install-dir $__ToolchainDir - -# Install the required packages into the toolchain -# TODO: Add logic to get latest pkg version instead of specific version number -rm -rf $__Android_Cross_Dir/deb/ -rm -rf $__Android_Cross_Dir/tmp - -mkdir -p $__Android_Cross_Dir/deb/ -mkdir -p $__Android_Cross_Dir/tmp/$arch/ -wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/libicu_60.2_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/libicu_60.2_$__AndroidArch.deb -wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/libicu-dev_60.2_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/libicu-dev_60.2_$__AndroidArch.deb - -wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/libandroid-glob-dev_0.4_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/libandroid-glob-dev_0.4_$__AndroidArch.deb -wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/libandroid-glob_0.4_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/libandroid-glob_0.4_$__AndroidArch.deb -wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/libandroid-support-dev_22_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/libandroid-support-dev_22_$__AndroidArch.deb -wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/libandroid-support_22_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/libandroid-support_22_$__AndroidArch.deb -wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/liblzma-dev_5.2.3_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/liblzma-dev_5.2.3_$__AndroidArch.deb -wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/liblzma_5.2.3_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/liblzma_5.2.3_$__AndroidArch.deb -wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/libunwind-dev_1.2.20170304_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/libunwind-dev_1.2.20170304_$__AndroidArch.deb -wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/libunwind_1.2.20170304_$__AndroidArch.deb -O 
$__Android_Cross_Dir/deb/libunwind_1.2.20170304_$__AndroidArch.deb - -echo Unpacking Termux packages -dpkg -x $__Android_Cross_Dir/deb/libicu_60.2_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/ -dpkg -x $__Android_Cross_Dir/deb/libicu-dev_60.2_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/ -dpkg -x $__Android_Cross_Dir/deb/libandroid-glob-dev_0.4_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/ -dpkg -x $__Android_Cross_Dir/deb/libandroid-glob_0.4_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/ -dpkg -x $__Android_Cross_Dir/deb/libandroid-support-dev_22_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/ -dpkg -x $__Android_Cross_Dir/deb/libandroid-support_22_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/ -dpkg -x $__Android_Cross_Dir/deb/liblzma-dev_5.2.3_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/ -dpkg -x $__Android_Cross_Dir/deb/liblzma_5.2.3_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/ -dpkg -x $__Android_Cross_Dir/deb/libunwind-dev_1.2.20170304_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/ -dpkg -x $__Android_Cross_Dir/deb/libunwind_1.2.20170304_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/ - -cp -R $__Android_Cross_Dir/tmp/$__AndroidArch/data/data/com.termux/files/usr/* $__ToolchainDir/sysroot/usr/ +echo "Download dependencies..." +__TmpDir=$__CrossDir/tmp/$__BuildArch/ +mkdir -p "$__TmpDir" -# Generate platform file for build.sh script to assign to __DistroRid -echo "Generating platform file..." +# combined dependencies for coreclr, installer and libraries +__AndroidPackages="libicu" +__AndroidPackages+=" libandroid-glob" +__AndroidPackages+=" liblzma" +__AndroidPackages+=" krb5" +__AndroidPackages+=" openssl" -echo "RID=android.21-arm64" > $__ToolchainDir/sysroot/android_platform -echo Now run: -echo CONFIG_DIR=\`realpath cross/android/$__BuildArch\` ROOTFS_DIR=\`realpath $__ToolchainDir/sysroot\` ./build.sh cross $__BuildArch skipgenerateversion skipnuget cmakeargs -DENABLE_LLDBPLUGIN=0 +for path in $(wget -qO- https://packages.termux.dev/termux-main-21/dists/stable/main/binary-$__AndroidArch/Packages |\ + grep -A15 "Package: \(${__AndroidPackages// /\\|}\)" | grep -v "static\|tool" | grep Filename); do + if [[ "$path" != "Filename:" ]]; then + echo "Working on: $path" + wget -qO- https://packages.termux.dev/termux-main-21/$path | dpkg -x - "$__TmpDir" + fi +done + +cp -R "$__TmpDir/data/data/com.termux/files/usr/"* "$__ToolchainDir/sysroot/usr/" + +# Generate platform file for build.sh script to assign to __DistroRid +echo "Generating platform file..." 
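# For reference only: with the defaults set at the top of this script
# (__ApiLevel=28, __BuildArch=arm64), the platform file written on the next
# line comes out as a single-line RID marker, e.g.:
#   echo "RID=android.28-arm64" > "$__ToolchainDir/sysroot/android_platform"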
+echo "RID=android.${__ApiLevel}-${__BuildArch}" > $__ToolchainDir/sysroot/android_platform + +echo "Now to build coreclr, libraries and installers; run:" +echo ROOTFS_DIR=\$\(realpath $__ToolchainDir/sysroot\) ./build.sh --cross --arch $__BuildArch \ + --subsetCategory coreclr +echo ROOTFS_DIR=\$\(realpath $__ToolchainDir/sysroot\) ./build.sh --cross --arch $__BuildArch \ + --subsetCategory libraries +echo ROOTFS_DIR=\$\(realpath $__ToolchainDir/sysroot\) ./build.sh --cross --arch $__BuildArch \ + --subsetCategory installer diff --git a/eng/common/cross/build-rootfs.sh b/eng/common/cross/build-rootfs.sh index d7d5d7d5f..9caf9b021 100755 --- a/eng/common/cross/build-rootfs.sh +++ b/eng/common/cross/build-rootfs.sh @@ -1,19 +1,34 @@ #!/usr/bin/env bash +set -e + usage() { - echo "Usage: $0 [BuildArch] [LinuxCodeName] [lldbx.y] [--skipunmount] --rootfsdir ]" - echo "BuildArch can be: arm(default), armel, arm64, x86" - echo "LinuxCodeName - optional, Code name for Linux, can be: trusty, xenial(default), zesty, bionic, alpine. If BuildArch is armel, LinuxCodeName is jessie(default) or tizen." - echo "lldbx.y - optional, LLDB version, can be: lldb3.9(default), lldb4.0, lldb5.0, lldb6.0 no-lldb. Ignored for alpine" + echo "Usage: $0 [BuildArch] [CodeName] [lldbx.y] [llvmx[.y]] [--skipunmount] --rootfsdir ]" + echo "BuildArch can be: arm(default), arm64, armel, armv6, ppc64le, riscv64, s390x, x64, x86" + echo "CodeName - optional, Code name for Linux, can be: xenial(default), zesty, bionic, alpine" + echo " for alpine can be specified with version: alpineX.YY or alpineedge" + echo " for FreeBSD can be: freebsd12, freebsd13" + echo " for illumos can be: illumos" + echo " for Haiku can be: haiku." + echo "lldbx.y - optional, LLDB version, can be: lldb3.9(default), lldb4.0, lldb5.0, lldb6.0 no-lldb. Ignored for alpine and FreeBSD" + echo "llvmx[.y] - optional, LLVM version for LLVM related packages." echo "--skipunmount - optional, will skip the unmount of rootfs folder." + echo "--skipsigcheck - optional, will skip package signature checks (allowing untrusted packages)." + echo "--use-mirror - optional, use mirror URL to fetch resources, when available." + echo "--jobs N - optional, restrict to N jobs." 
exit 1 } -__LinuxCodeName=xenial +__CodeName=xenial __CrossDir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd ) -__InitialDir=$PWD __BuildArch=arm +__AlpineArch=armv7 +__FreeBSDArch=arm +__FreeBSDMachineArch=armv7 +__IllumosArch=arm7 +__HaikuArch=arm +__QEMUArch=arm __UbuntuArch=armhf __UbuntuRepo="http://ports.ubuntu.com/" __LLDB_Package="liblldb-3.9-dev" @@ -26,22 +41,26 @@ __AlpinePackages="alpine-base" __AlpinePackages+=" build-base" __AlpinePackages+=" linux-headers" __AlpinePackages+=" lldb-dev" -__AlpinePackages+=" llvm-dev" +__AlpinePackages+=" python3" +__AlpinePackages+=" libedit" # symlinks fixer __UbuntuPackages+=" symlinks" -# CoreCLR and CoreFX dependencies +# runtime dependencies __UbuntuPackages+=" libicu-dev" __UbuntuPackages+=" liblttng-ust-dev" __UbuntuPackages+=" libunwind8-dev" +__UbuntuPackages+=" libnuma-dev" __AlpinePackages+=" gettext-dev" __AlpinePackages+=" icu-dev" __AlpinePackages+=" libunwind-dev" __AlpinePackages+=" lttng-ust-dev" +__AlpinePackages+=" compiler-rt" +__AlpinePackages+=" numactl-dev" -# CoreFX dependencies +# runtime libraries' dependencies __UbuntuPackages+=" libcurl4-openssl-dev" __UbuntuPackages+=" libkrb5-dev" __UbuntuPackages+=" libssl-dev" @@ -52,22 +71,83 @@ __AlpinePackages+=" krb5-dev" __AlpinePackages+=" openssl-dev" __AlpinePackages+=" zlib-dev" +__FreeBSDBase="12.4-RELEASE" +__FreeBSDPkg="1.17.0" +__FreeBSDABI="12" +__FreeBSDPackages="libunwind" +__FreeBSDPackages+=" icu" +__FreeBSDPackages+=" libinotify" +__FreeBSDPackages+=" openssl" +__FreeBSDPackages+=" krb5" +__FreeBSDPackages+=" terminfo-db" + +__IllumosPackages="icu" +__IllumosPackages+=" mit-krb5" +__IllumosPackages+=" openssl" +__IllumosPackages+=" zlib" + +__HaikuPackages="gcc_syslibs" +__HaikuPackages+=" gcc_syslibs_devel" +__HaikuPackages+=" gmp" +__HaikuPackages+=" gmp_devel" +__HaikuPackages+=" icu66" +__HaikuPackages+=" icu66_devel" +__HaikuPackages+=" krb5" +__HaikuPackages+=" krb5_devel" +__HaikuPackages+=" libiconv" +__HaikuPackages+=" libiconv_devel" +__HaikuPackages+=" llvm12_libunwind" +__HaikuPackages+=" llvm12_libunwind_devel" +__HaikuPackages+=" mpfr" +__HaikuPackages+=" mpfr_devel" +__HaikuPackages+=" openssl" +__HaikuPackages+=" openssl_devel" +__HaikuPackages+=" zlib" +__HaikuPackages+=" zlib_devel" + +# ML.NET dependencies +__UbuntuPackages+=" libomp5" +__UbuntuPackages+=" libomp-dev" + +# Taken from https://github.com/alpinelinux/alpine-chroot-install/blob/6d08f12a8a70dd9b9dc7d997c88aa7789cc03c42/alpine-chroot-install#L85-L133 +__AlpineKeys=' +4a6a0840:MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA1yHJxQgsHQREclQu4Ohe\nqxTxd1tHcNnvnQTu/UrTky8wWvgXT+jpveroeWWnzmsYlDI93eLI2ORakxb3gA2O\nQ0Ry4ws8vhaxLQGC74uQR5+/yYrLuTKydFzuPaS1dK19qJPXB8GMdmFOijnXX4SA\njixuHLe1WW7kZVtjL7nufvpXkWBGjsfrvskdNA/5MfxAeBbqPgaq0QMEfxMAn6/R\nL5kNepi/Vr4S39Xvf2DzWkTLEK8pcnjNkt9/aafhWqFVW7m3HCAII6h/qlQNQKSo\nGuH34Q8GsFG30izUENV9avY7hSLq7nggsvknlNBZtFUcmGoQrtx3FmyYsIC8/R+B\nywIDAQAB +5243ef4b:MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvNijDxJ8kloskKQpJdx+\nmTMVFFUGDoDCbulnhZMJoKNkSuZOzBoFC94omYPtxnIcBdWBGnrm6ncbKRlR+6oy\nDO0W7c44uHKCFGFqBhDasdI4RCYP+fcIX/lyMh6MLbOxqS22TwSLhCVjTyJeeH7K\naA7vqk+QSsF4TGbYzQDDpg7+6aAcNzg6InNePaywA6hbT0JXbxnDWsB+2/LLSF2G\nmnhJlJrWB1WGjkz23ONIWk85W4S0XB/ewDefd4Ly/zyIciastA7Zqnh7p3Ody6Q0\nsS2MJzo7p3os1smGjUF158s6m/JbVh4DN6YIsxwl2OjDOz9R0OycfJSDaBVIGZzg\ncQIDAQAB 
+524d27bb:MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr8s1q88XpuJWLCZALdKj\nlN8wg2ePB2T9aIcaxryYE/Jkmtu+ZQ5zKq6BT3y/udt5jAsMrhHTwroOjIsF9DeG\ne8Y3vjz+Hh4L8a7hZDaw8jy3CPag47L7nsZFwQOIo2Cl1SnzUc6/owoyjRU7ab0p\niWG5HK8IfiybRbZxnEbNAfT4R53hyI6z5FhyXGS2Ld8zCoU/R4E1P0CUuXKEN4p0\n64dyeUoOLXEWHjgKiU1mElIQj3k/IF02W89gDj285YgwqA49deLUM7QOd53QLnx+\nxrIrPv3A+eyXMFgexNwCKQU9ZdmWa00MjjHlegSGK8Y2NPnRoXhzqSP9T9i2HiXL\nVQIDAQAB +5261cecb:MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAwlzMkl7b5PBdfMzGdCT0\ncGloRr5xGgVmsdq5EtJvFkFAiN8Ac9MCFy/vAFmS8/7ZaGOXoCDWbYVLTLOO2qtX\nyHRl+7fJVh2N6qrDDFPmdgCi8NaE+3rITWXGrrQ1spJ0B6HIzTDNEjRKnD4xyg4j\ng01FMcJTU6E+V2JBY45CKN9dWr1JDM/nei/Pf0byBJlMp/mSSfjodykmz4Oe13xB\nCa1WTwgFykKYthoLGYrmo+LKIGpMoeEbY1kuUe04UiDe47l6Oggwnl+8XD1MeRWY\nsWgj8sF4dTcSfCMavK4zHRFFQbGp/YFJ/Ww6U9lA3Vq0wyEI6MCMQnoSMFwrbgZw\nwwIDAQAB +58199dcc:MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA3v8/ye/V/t5xf4JiXLXa\nhWFRozsnmn3hobON20GdmkrzKzO/eUqPOKTpg2GtvBhK30fu5oY5uN2ORiv2Y2ht\neLiZ9HVz3XP8Fm9frha60B7KNu66FO5P2o3i+E+DWTPqqPcCG6t4Znk2BypILcit\nwiPKTsgbBQR2qo/cO01eLLdt6oOzAaF94NH0656kvRewdo6HG4urbO46tCAizvCR\nCA7KGFMyad8WdKkTjxh8YLDLoOCtoZmXmQAiwfRe9pKXRH/XXGop8SYptLqyVVQ+\ntegOD9wRs2tOlgcLx4F/uMzHN7uoho6okBPiifRX+Pf38Vx+ozXh056tjmdZkCaV\naQIDAQAB +58cbb476:MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAoSPnuAGKtRIS5fEgYPXD\n8pSGvKAmIv3A08LBViDUe+YwhilSHbYXUEAcSH1KZvOo1WT1x2FNEPBEFEFU1Eyc\n+qGzbA03UFgBNvArurHQ5Z/GngGqE7IarSQFSoqewYRtFSfp+TL9CUNBvM0rT7vz\n2eMu3/wWG+CBmb92lkmyWwC1WSWFKO3x8w+Br2IFWvAZqHRt8oiG5QtYvcZL6jym\nY8T6sgdDlj+Y+wWaLHs9Fc+7vBuyK9C4O1ORdMPW15qVSl4Lc2Wu1QVwRiKnmA+c\nDsH/m7kDNRHM7TjWnuj+nrBOKAHzYquiu5iB3Qmx+0gwnrSVf27Arc3ozUmmJbLj\nzQIDAQAB +58e4f17d:MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvBxJN9ErBgdRcPr5g4hV\nqyUSGZEKuvQliq2Z9SRHLh2J43+EdB6A+yzVvLnzcHVpBJ+BZ9RV30EM9guck9sh\nr+bryZcRHyjG2wiIEoduxF2a8KeWeQH7QlpwGhuobo1+gA8L0AGImiA6UP3LOirl\nI0G2+iaKZowME8/tydww4jx5vG132JCOScMjTalRsYZYJcjFbebQQolpqRaGB4iG\nWqhytWQGWuKiB1A22wjmIYf3t96l1Mp+FmM2URPxD1gk/BIBnX7ew+2gWppXOK9j\n1BJpo0/HaX5XoZ/uMqISAAtgHZAqq+g3IUPouxTphgYQRTRYpz2COw3NF43VYQrR\nbQIDAQAB +60ac2099:MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAwR4uJVtJOnOFGchnMW5Y\nj5/waBdG1u5BTMlH+iQMcV5+VgWhmpZHJCBz3ocD+0IGk2I68S5TDOHec/GSC0lv\n6R9o6F7h429GmgPgVKQsc8mPTPtbjJMuLLs4xKc+viCplXc0Nc0ZoHmCH4da6fCV\ntdpHQjVe6F9zjdquZ4RjV6R6JTiN9v924dGMAkbW/xXmamtz51FzondKC52Gh8Mo\n/oA0/T0KsCMCi7tb4QNQUYrf+Xcha9uus4ww1kWNZyfXJB87a2kORLiWMfs2IBBJ\nTmZ2Fnk0JnHDb8Oknxd9PvJPT0mvyT8DA+KIAPqNvOjUXP4bnjEHJcoCP9S5HkGC\nIQIDAQAB +6165ee59:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAutQkua2CAig4VFSJ7v54\nALyu/J1WB3oni7qwCZD3veURw7HxpNAj9hR+S5N/pNeZgubQvJWyaPuQDm7PTs1+\ntFGiYNfAsiibX6Rv0wci3M+z2XEVAeR9Vzg6v4qoofDyoTbovn2LztaNEjTkB+oK\ntlvpNhg1zhou0jDVYFniEXvzjckxswHVb8cT0OMTKHALyLPrPOJzVtM9C1ew2Nnc\n3848xLiApMu3NBk0JqfcS3Bo5Y2b1FRVBvdt+2gFoKZix1MnZdAEZ8xQzL/a0YS5\nHd0wj5+EEKHfOd3A75uPa/WQmA+o0cBFfrzm69QDcSJSwGpzWrD1ScH3AK8nWvoj\nv7e9gukK/9yl1b4fQQ00vttwJPSgm9EnfPHLAtgXkRloI27H6/PuLoNvSAMQwuCD\nhQRlyGLPBETKkHeodfLoULjhDi1K2gKJTMhtbnUcAA7nEphkMhPWkBpgFdrH+5z4\nLxy+3ek0cqcI7K68EtrffU8jtUj9LFTUC8dERaIBs7NgQ/LfDbDfGh9g6qVj1hZl\nk9aaIPTm/xsi8v3u+0qaq7KzIBc9s59JOoA8TlpOaYdVgSQhHHLBaahOuAigH+VI\nisbC9vmqsThF2QdDtQt37keuqoda2E6sL7PUvIyVXDRfwX7uMDjlzTxHTymvq2Ck\nhtBqojBnThmjJQFgZXocHG8CAwEAAQ== 
+61666e3f:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAlEyxkHggKCXC2Wf5Mzx4\nnZLFZvU2bgcA3exfNPO/g1YunKfQY+Jg4fr6tJUUTZ3XZUrhmLNWvpvSwDS19ZmC\nIXOu0+V94aNgnhMsk9rr59I8qcbsQGIBoHzuAl8NzZCgdbEXkiY90w1skUw8J57z\nqCsMBydAueMXuWqF5nGtYbi5vHwK42PffpiZ7G5Kjwn8nYMW5IZdL6ZnMEVJUWC9\nI4waeKg0yskczYDmZUEAtrn3laX9677ToCpiKrvmZYjlGl0BaGp3cxggP2xaDbUq\nqfFxWNgvUAb3pXD09JM6Mt6HSIJaFc9vQbrKB9KT515y763j5CC2KUsilszKi3mB\nHYe5PoebdjS7D1Oh+tRqfegU2IImzSwW3iwA7PJvefFuc/kNIijfS/gH/cAqAK6z\nbhdOtE/zc7TtqW2Wn5Y03jIZdtm12CxSxwgtCF1NPyEWyIxAQUX9ACb3M0FAZ61n\nfpPrvwTaIIxxZ01L3IzPLpbc44x/DhJIEU+iDt6IMTrHOphD9MCG4631eIdB0H1b\n6zbNX1CXTsafqHRFV9XmYYIeOMggmd90s3xIbEujA6HKNP/gwzO6CDJ+nHFDEqoF\nSkxRdTkEqjTjVKieURW7Swv7zpfu5PrsrrkyGnsRrBJJzXlm2FOOxnbI2iSL1B5F\nrO5kbUxFeZUIDq+7Yv4kLWcCAwEAAQ== +616a9724:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAnC+bR4bHf/L6QdU4puhQ\ngl1MHePszRC38bzvVFDUJsmCaMCL2suCs2A2yxAgGb9pu9AJYLAmxQC4mM3jNqhg\n/E7yuaBbek3O02zN/ctvflJ250wZCy+z0ZGIp1ak6pu1j14IwHokl9j36zNfGtfv\nADVOcdpWITFFlPqwq1qt/H3UsKVmtiF3BNWWTeUEQwKvlU8ymxgS99yn0+4OPyNT\nL3EUeS+NQJtDS01unau0t7LnjUXn+XIneWny8bIYOQCuVR6s/gpIGuhBaUqwaJOw\n7jkJZYF2Ij7uPb4b5/R3vX2FfxxqEHqssFSg8FFUNTZz3qNZs0CRVyfA972g9WkJ\nhPfn31pQYil4QGRibCMIeU27YAEjXoqfJKEPh4UWMQsQLrEfdGfb8VgwrPbniGfU\nL3jKJR3VAafL9330iawzVQDlIlwGl6u77gEXMl9K0pfazunYhAp+BMP+9ot5ckK+\nosmrqj11qMESsAj083GeFdfV3pXEIwUytaB0AKEht9DbqUfiE/oeZ/LAXgySMtVC\nsbC4ESmgVeY2xSBIJdDyUap7FR49GGrw0W49NUv9gRgQtGGaNVQQO9oGL2PBC41P\niWF9GLoX30HIz1P8PF/cZvicSSPkQf2Z6TV+t0ebdGNS5DjapdnCrq8m9Z0pyKsQ\nuxAL2a7zX8l5i1CZh1ycUGsCAwEAAQ== +616abc23:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA0MfCDrhODRCIxR9Dep1s\neXafh5CE5BrF4WbCgCsevyPIdvTeyIaW4vmO3bbG4VzhogDZju+R3IQYFuhoXP5v\nY+zYJGnwrgz3r5wYAvPnLEs1+dtDKYOgJXQj+wLJBW1mzRDL8FoRXOe5iRmn1EFS\nwZ1DoUvyu7/J5r0itKicZp3QKED6YoilXed+1vnS4Sk0mzN4smuMR9eO1mMCqNp9\n9KTfRDHTbakIHwasECCXCp50uXdoW6ig/xUAFanpm9LtK6jctNDbXDhQmgvAaLXZ\nLvFqoaYJ/CvWkyYCgL6qxvMvVmPoRv7OPcyni4xR/WgWa0MSaEWjgPx3+yj9fiMA\n1S02pFWFDOr5OUF/O4YhFJvUCOtVsUPPfA/Lj6faL0h5QI9mQhy5Zb9TTaS9jB6p\nLw7u0dJlrjFedk8KTJdFCcaGYHP6kNPnOxMylcB/5WcztXZVQD5WpCicGNBxCGMm\nW64SgrV7M07gQfL/32QLsdqPUf0i8hoVD8wfQ3EpbQzv6Fk1Cn90bZqZafg8XWGY\nwddhkXk7egrr23Djv37V2okjzdqoyLBYBxMz63qQzFoAVv5VoY2NDTbXYUYytOvG\nGJ1afYDRVWrExCech1mX5ZVUB1br6WM+psFLJFoBFl6mDmiYt0vMYBddKISsvwLl\nIJQkzDwtXzT2cSjoj3T5QekCAwEAAQ== +616ac3bc:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAvaaoSLab+IluixwKV5Od\n0gib2YurjPatGIbn5Ov2DLUFYiebj2oJINXJSwUOO+4WcuHFEqiL/1rya+k5hLZt\nhnPL1tn6QD4rESznvGSasRCQNT2vS/oyZbTYJRyAtFkEYLlq0t3S3xBxxHWuvIf0\nqVxVNYpQWyM3N9RIeYBR/euXKJXileSHk/uq1I5wTC0XBIHWcthczGN0m9wBEiWS\n0m3cnPk4q0Ea8mUJ91Rqob19qETz6VbSPYYpZk3qOycjKosuwcuzoMpwU8KRiMFd\n5LHtX0Hx85ghGsWDVtS0c0+aJa4lOMGvJCAOvDfqvODv7gKlCXUpgumGpLdTmaZ8\n1RwqspAe3IqBcdKTqRD4m2mSg23nVx2FAY3cjFvZQtfooT7q1ItRV5RgH6FhQSl7\n+6YIMJ1Bf8AAlLdRLpg+doOUGcEn+pkDiHFgI8ylH1LKyFKw+eXaAml/7DaWZk1d\ndqggwhXOhc/UUZFQuQQ8A8zpA13PcbC05XxN2hyP93tCEtyynMLVPtrRwDnHxFKa\nqKzs3rMDXPSXRn3ZZTdKH3069ApkEjQdpcwUh+EmJ1Ve/5cdtzT6kKWCjKBFZP/s\n91MlRrX2BTRdHaU5QJkUheUtakwxuHrdah2F94lRmsnQlpPr2YseJu6sIE+Dnx4M\nCfhdVbQL2w54R645nlnohu8CAwEAAQ== 
+616adfeb:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAq0BFD1D4lIxQcsqEpQzU\npNCYM3aP1V/fxxVdT4DWvSI53JHTwHQamKdMWtEXetWVbP5zSROniYKFXd/xrD9X\n0jiGHey3lEtylXRIPxe5s+wXoCmNLcJVnvTcDtwx/ne2NLHxp76lyc25At+6RgE6\nADjLVuoD7M4IFDkAsd8UQ8zM0Dww9SylIk/wgV3ZkifecvgUQRagrNUdUjR56EBZ\nraQrev4hhzOgwelT0kXCu3snbUuNY/lU53CoTzfBJ5UfEJ5pMw1ij6X0r5S9IVsy\nKLWH1hiO0NzU2c8ViUYCly4Fe9xMTFc6u2dy/dxf6FwERfGzETQxqZvSfrRX+GLj\n/QZAXiPg5178hT/m0Y3z5IGenIC/80Z9NCi+byF1WuJlzKjDcF/TU72zk0+PNM/H\nKuppf3JT4DyjiVzNC5YoWJT2QRMS9KLP5iKCSThwVceEEg5HfhQBRT9M6KIcFLSs\nmFjx9kNEEmc1E8hl5IR3+3Ry8G5/bTIIruz14jgeY9u5jhL8Vyyvo41jgt9sLHR1\n/J1TxKfkgksYev7PoX6/ZzJ1ksWKZY5NFoDXTNYUgzFUTOoEaOg3BAQKadb3Qbbq\nXIrxmPBdgrn9QI7NCgfnAY3Tb4EEjs3ON/BNyEhUENcXOH6I1NbcuBQ7g9P73kE4\nVORdoc8MdJ5eoKBpO8Ww8HECAwEAAQ== +616ae350:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAyduVzi1mWm+lYo2Tqt/0\nXkCIWrDNP1QBMVPrE0/ZlU2bCGSoo2Z9FHQKz/mTyMRlhNqTfhJ5qU3U9XlyGOPJ\npiM+b91g26pnpXJ2Q2kOypSgOMOPA4cQ42PkHBEqhuzssfj9t7x47ppS94bboh46\nxLSDRff/NAbtwTpvhStV3URYkxFG++cKGGa5MPXBrxIp+iZf9GnuxVdST5PGiVGP\nODL/b69sPJQNbJHVquqUTOh5Ry8uuD2WZuXfKf7/C0jC/ie9m2+0CttNu9tMciGM\nEyKG1/Xhk5iIWO43m4SrrT2WkFlcZ1z2JSf9Pjm4C2+HovYpihwwdM/OdP8Xmsnr\nDzVB4YvQiW+IHBjStHVuyiZWc+JsgEPJzisNY0Wyc/kNyNtqVKpX6dRhMLanLmy+\nf53cCSI05KPQAcGj6tdL+D60uKDkt+FsDa0BTAobZ31OsFVid0vCXtsbplNhW1IF\nHwsGXBTVcfXg44RLyL8Lk/2dQxDHNHzAUslJXzPxaHBLmt++2COa2EI1iWlvtznk\nOk9WP8SOAIj+xdqoiHcC4j72BOVVgiITIJNHrbppZCq6qPR+fgXmXa+sDcGh30m6\n9Wpbr28kLMSHiENCWTdsFij+NQTd5S47H7XTROHnalYDuF1RpS+DpQidT5tUimaT\nJZDr++FjKrnnijbyNF8b98UCAwEAAQ== +616db30d:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAnpUpyWDWjlUk3smlWeA0\nlIMW+oJ38t92CRLHH3IqRhyECBRW0d0aRGtq7TY8PmxjjvBZrxTNDpJT6KUk4LRm\na6A6IuAI7QnNK8SJqM0DLzlpygd7GJf8ZL9SoHSH+gFsYF67Cpooz/YDqWrlN7Vw\ntO00s0B+eXy+PCXYU7VSfuWFGK8TGEv6HfGMALLjhqMManyvfp8hz3ubN1rK3c8C\nUS/ilRh1qckdbtPvoDPhSbTDmfU1g/EfRSIEXBrIMLg9ka/XB9PvWRrekrppnQzP\nhP9YE3x/wbFc5QqQWiRCYyQl/rgIMOXvIxhkfe8H5n1Et4VAorkpEAXdsfN8KSVv\nLSMazVlLp9GYq5SUpqYX3KnxdWBgN7BJoZ4sltsTpHQ/34SXWfu3UmyUveWj7wp0\nx9hwsPirVI00EEea9AbP7NM2rAyu6ukcm4m6ATd2DZJIViq2es6m60AE6SMCmrQF\nwmk4H/kdQgeAELVfGOm2VyJ3z69fQuywz7xu27S6zTKi05Qlnohxol4wVb6OB7qG\nLPRtK9ObgzRo/OPumyXqlzAi/Yvyd1ZQk8labZps3e16bQp8+pVPiumWioMFJDWV\nGZjCmyMSU8V6MB6njbgLHoyg2LCukCAeSjbPGGGYhnKLm1AKSoJh3IpZuqcKCk5C\n8CM1S15HxV78s9dFntEqIokCAwEAAQ== +' +__Keyring= +__SkipSigCheck=0 +__UseMirror=0 + __UnprocessedBuildArgs= while :; do - if [ $# -le 0 ]; then + if [[ "$#" -le 0 ]]; then break fi - lowerI="$(echo $1 | awk '{print tolower($0)}')" + lowerI="$(echo "$1" | tr "[:upper:]" "[:lower:]")" case $lowerI in - -?|-h|--help) + -\?|-h|--help) usage exit 1 ;; arm) __BuildArch=arm __UbuntuArch=armhf - __AlpineArch=armhf + __AlpineArch=armv7 __QEMUArch=arm ;; arm64) @@ -75,93 +155,219 @@ while :; do __UbuntuArch=arm64 __AlpineArch=aarch64 __QEMUArch=aarch64 + __FreeBSDArch=arm64 + __FreeBSDMachineArch=aarch64 ;; armel) __BuildArch=armel __UbuntuArch=armel __UbuntuRepo="http://ftp.debian.org/debian/" - __LinuxCodeName=jessie + __CodeName=jessie ;; - x86) - __BuildArch=x86 - __UbuntuArch=i386 - __UbuntuRepo="http://archive.ubuntu.com/ubuntu/" + armv6) + __BuildArch=armv6 + __UbuntuArch=armhf + __QEMUArch=arm + __UbuntuRepo="http://raspbian.raspberrypi.org/raspbian/" + __CodeName=buster + __LLDB_Package="liblldb-6.0-dev" + + if [[ -e "/usr/share/keyrings/raspbian-archive-keyring.gpg" ]]; then + __Keyring="--keyring /usr/share/keyrings/raspbian-archive-keyring.gpg" + fi ;; - lldb3.6) - __LLDB_Package="lldb-3.6-dev" + riscv64) + __BuildArch=riscv64 + __AlpineArch=riscv64 + 
__AlpinePackages="${__AlpinePackages// lldb-dev/}" + __QEMUArch=riscv64 + __UbuntuArch=riscv64 + __UbuntuRepo="http://deb.debian.org/debian-ports" + __UbuntuPackages="${__UbuntuPackages// libunwind8-dev/}" + unset __LLDB_Package + + if [[ -e "/usr/share/keyrings/debian-ports-archive-keyring.gpg" ]]; then + __Keyring="--keyring /usr/share/keyrings/debian-ports-archive-keyring.gpg --include=debian-ports-archive-keyring" + fi ;; - lldb3.8) - __LLDB_Package="lldb-3.8-dev" + ppc64le) + __BuildArch=ppc64le + __AlpineArch=ppc64le + __QEMUArch=ppc64le + __UbuntuArch=ppc64el + __UbuntuRepo="http://ports.ubuntu.com/ubuntu-ports/" + __UbuntuPackages="${__UbuntuPackages// libunwind8-dev/}" + __UbuntuPackages="${__UbuntuPackages// libomp-dev/}" + __UbuntuPackages="${__UbuntuPackages// libomp5/}" + unset __LLDB_Package ;; - lldb3.9) - __LLDB_Package="liblldb-3.9-dev" + s390x) + __BuildArch=s390x + __AlpineArch=s390x + __QEMUArch=s390x + __UbuntuArch=s390x + __UbuntuRepo="http://ports.ubuntu.com/ubuntu-ports/" + __UbuntuPackages="${__UbuntuPackages// libunwind8-dev/}" + __UbuntuPackages="${__UbuntuPackages// libomp-dev/}" + __UbuntuPackages="${__UbuntuPackages// libomp5/}" + unset __LLDB_Package ;; - lldb4.0) - __LLDB_Package="liblldb-4.0-dev" + x64) + __BuildArch=x64 + __AlpineArch=x86_64 + __UbuntuArch=amd64 + __FreeBSDArch=amd64 + __FreeBSDMachineArch=amd64 + __illumosArch=x86_64 + __HaikuArch=x86_64 + __UbuntuRepo="http://archive.ubuntu.com/ubuntu/" ;; - lldb5.0) - __LLDB_Package="liblldb-5.0-dev" + x86) + __BuildArch=x86 + __UbuntuArch=i386 + __AlpineArch=x86 + __UbuntuRepo="http://archive.ubuntu.com/ubuntu/" ;; - lldb6.0) - __LLDB_Package="liblldb-6.0-dev" + lldb*) + version="${lowerI/lldb/}" + parts=(${version//./ }) + + # for versions > 6.0, lldb has dropped the minor version + if [[ "${parts[0]}" -gt 6 ]]; then + version="${parts[0]}" + fi + + __LLDB_Package="liblldb-${version}-dev" ;; no-lldb) unset __LLDB_Package ;; - trusty) # Ubuntu 14.04 - if [ "$__LinuxCodeName" != "jessie" ]; then - __LinuxCodeName=trusty + llvm*) + version="${lowerI/llvm/}" + parts=(${version//./ }) + __LLVM_MajorVersion="${parts[0]}" + __LLVM_MinorVersion="${parts[1]}" + + # for versions > 6.0, llvm has dropped the minor version + if [[ -z "$__LLVM_MinorVersion" && "$__LLVM_MajorVersion" -le 6 ]]; then + __LLVM_MinorVersion=0; fi ;; xenial) # Ubuntu 16.04 - if [ "$__LinuxCodeName" != "jessie" ]; then - __LinuxCodeName=xenial + if [[ "$__CodeName" != "jessie" ]]; then + __CodeName=xenial fi ;; zesty) # Ubuntu 17.04 - if [ "$__LinuxCodeName" != "jessie" ]; then - __LinuxCodeName=zesty + if [[ "$__CodeName" != "jessie" ]]; then + __CodeName=zesty fi ;; bionic) # Ubuntu 18.04 - if [ "$__LinuxCodeName" != "jessie" ]; then - __LinuxCodeName=bionic + if [[ "$__CodeName" != "jessie" ]]; then + __CodeName=bionic + fi + ;; + focal) # Ubuntu 20.04 + if [[ "$__CodeName" != "jessie" ]]; then + __CodeName=focal + fi + ;; + jammy) # Ubuntu 22.04 + if [[ "$__CodeName" != "jessie" ]]; then + __CodeName=jammy fi ;; jessie) # Debian 8 - __LinuxCodeName=jessie - __UbuntuRepo="http://ftp.debian.org/debian/" + __CodeName=jessie + + if [[ -z "$__UbuntuRepo" ]]; then + __UbuntuRepo="http://ftp.debian.org/debian/" + fi ;; stretch) # Debian 9 - __LinuxCodeName=stretch - __UbuntuRepo="http://ftp.debian.org/debian/" + __CodeName=stretch __LLDB_Package="liblldb-6.0-dev" + + if [[ -z "$__UbuntuRepo" ]]; then + __UbuntuRepo="http://ftp.debian.org/debian/" + fi ;; buster) # Debian 10 - __LinuxCodeName=buster - 
__UbuntuRepo="http://ftp.debian.org/debian/" + __CodeName=buster __LLDB_Package="liblldb-6.0-dev" + + if [[ -z "$__UbuntuRepo" ]]; then + __UbuntuRepo="http://ftp.debian.org/debian/" + fi ;; - tizen) - if [ "$__BuildArch" != "armel" ]; then - echo "Tizen is available only for armel." - usage; - exit 1; + bullseye) # Debian 11 + __CodeName=bullseye + + if [[ -z "$__UbuntuRepo" ]]; then + __UbuntuRepo="http://ftp.debian.org/debian/" fi - __LinuxCodeName= + ;; + sid) # Debian sid + __CodeName=sid + + if [[ -z "$__UbuntuRepo" ]]; then + __UbuntuRepo="http://ftp.debian.org/debian/" + fi + ;; + tizen) + __CodeName= __UbuntuRepo= __Tizen=tizen ;; - alpine) - __LinuxCodeName=alpine + alpine*) + __CodeName=alpine __UbuntuRepo= + version="${lowerI/alpine/}" + + if [[ "$version" == "edge" ]]; then + __AlpineVersion=edge + else + parts=(${version//./ }) + __AlpineMajorVersion="${parts[0]}" + __AlpineMinoVersion="${parts[1]}" + __AlpineVersion="$__AlpineMajorVersion.$__AlpineMinoVersion" + fi + ;; + freebsd12) + __CodeName=freebsd + __SkipUnmount=1 + ;; + freebsd13) + __CodeName=freebsd + __FreeBSDBase="13.2-RELEASE" + __FreeBSDABI="13" + __SkipUnmount=1 + ;; + illumos) + __CodeName=illumos + __SkipUnmount=1 + ;; + haiku) + __CodeName=haiku + __SkipUnmount=1 ;; --skipunmount) __SkipUnmount=1 ;; + --skipsigcheck) + __SkipSigCheck=1 + ;; --rootfsdir|-rootfsdir) shift - __RootfsDir=$1 + __RootfsDir="$1" + ;; + --use-mirror) + __UseMirror=1 + ;; + --use-jobs) + shift + MAXJOBS=$1 ;; *) __UnprocessedBuildArgs="$__UnprocessedBuildArgs $1" @@ -171,62 +377,270 @@ while :; do shift done -if [ "$__BuildArch" == "armel" ]; then +case "$__AlpineVersion" in + 3.14) __AlpinePackages+=" llvm11-libs" ;; + 3.15) __AlpinePackages+=" llvm12-libs" ;; + 3.16) __AlpinePackages+=" llvm13-libs" ;; + 3.17) __AlpinePackages+=" llvm15-libs" ;; + edge) __AlpineLlvmLibsLookup=1 ;; + *) + if [[ "$__AlpineArch" =~ s390x|ppc64le ]]; then + __AlpineVersion=3.15 # minimum version that supports lldb-dev + __AlpinePackages+=" llvm12-libs" + elif [[ "$__AlpineArch" == "x86" ]]; then + __AlpineVersion=3.17 # minimum version that supports lldb-dev + __AlpinePackages+=" llvm15-libs" + elif [[ "$__AlpineArch" == "riscv64" ]]; then + __AlpineLlvmLibsLookup=1 + __AlpineVersion=edge # minimum version with APKINDEX.tar.gz (packages archive) + else + __AlpineVersion=3.13 # 3.13 to maximize compatibility + __AlpinePackages+=" llvm10-libs" + + if [[ "$__AlpineArch" == "armv7" ]]; then + __AlpinePackages="${__AlpinePackages//numactl-dev/}" + fi + fi +esac + +if [[ "$__AlpineVersion" =~ 3\.1[345] ]]; then + # compiler-rt--static was merged in compiler-rt package in alpine 3.16 + # for older versions, we need compiler-rt--static, so replace the name + __AlpinePackages="${__AlpinePackages/compiler-rt/compiler-rt-static}" +fi + +if [[ "$__BuildArch" == "armel" ]]; then __LLDB_Package="lldb-3.5-dev" fi + +if [[ "$__CodeName" == "xenial" && "$__UbuntuArch" == "armhf" ]]; then + # libnuma-dev is not available on armhf for xenial + __UbuntuPackages="${__UbuntuPackages//libnuma-dev/}" +fi + __UbuntuPackages+=" ${__LLDB_Package:-}" -if [ -z "$__RootfsDir" ] && [ ! 
-z "$ROOTFS_DIR" ]; then - __RootfsDir=$ROOTFS_DIR +if [[ -n "$__LLVM_MajorVersion" ]]; then + __UbuntuPackages+=" libclang-common-${__LLVM_MajorVersion}${__LLVM_MinorVersion:+.$__LLVM_MinorVersion}-dev" +fi + +if [[ -z "$__RootfsDir" && -n "$ROOTFS_DIR" ]]; then + __RootfsDir="$ROOTFS_DIR" fi -if [ -z "$__RootfsDir" ]; then +if [[ -z "$__RootfsDir" ]]; then __RootfsDir="$__CrossDir/../../../.tools/rootfs/$__BuildArch" fi -if [ -d "$__RootfsDir" ]; then - if [ $__SkipUnmount == 0 ]; then - umount $__RootfsDir/* +if [[ -d "$__RootfsDir" ]]; then + if [[ "$__SkipUnmount" == "0" ]]; then + umount "$__RootfsDir"/* || true fi - rm -rf $__RootfsDir + rm -rf "$__RootfsDir" fi -if [[ "$__LinuxCodeName" == "alpine" ]]; then - __ApkToolsVersion=2.9.1 - __AlpineVersion=3.7 - __ApkToolsDir=$(mktemp -d) - wget https://github.com/alpinelinux/apk-tools/releases/download/v$__ApkToolsVersion/apk-tools-$__ApkToolsVersion-x86_64-linux.tar.gz -P $__ApkToolsDir - tar -xf $__ApkToolsDir/apk-tools-$__ApkToolsVersion-x86_64-linux.tar.gz -C $__ApkToolsDir - mkdir -p $__RootfsDir/usr/bin - cp -v /usr/bin/qemu-$__QEMUArch-static $__RootfsDir/usr/bin - $__ApkToolsDir/apk-tools-$__ApkToolsVersion/apk \ - -X http://dl-cdn.alpinelinux.org/alpine/v$__AlpineVersion/main \ - -X http://dl-cdn.alpinelinux.org/alpine/v$__AlpineVersion/community \ - -X http://dl-cdn.alpinelinux.org/alpine/edge/testing \ - -X http://dl-cdn.alpinelinux.org/alpine/edge/main \ - -U --allow-untrusted --root $__RootfsDir --arch $__AlpineArch --initdb \ - add $__AlpinePackages - rm -r $__ApkToolsDir -elif [[ -n $__LinuxCodeName ]]; then - qemu-debootstrap --arch $__UbuntuArch $__LinuxCodeName $__RootfsDir $__UbuntuRepo - cp $__CrossDir/$__BuildArch/sources.list.$__LinuxCodeName $__RootfsDir/etc/apt/sources.list - chroot $__RootfsDir apt-get update - chroot $__RootfsDir apt-get -f -y install - chroot $__RootfsDir apt-get -y install $__UbuntuPackages - chroot $__RootfsDir symlinks -cr /usr - - if [ $__SkipUnmount == 0 ]; then - umount $__RootfsDir/* +mkdir -p "$__RootfsDir" +__RootfsDir="$( cd "$__RootfsDir" && pwd )" + +if [[ "$__CodeName" == "alpine" ]]; then + __ApkToolsVersion=2.12.11 + __ApkToolsSHA512SUM=53e57b49230da07ef44ee0765b9592580308c407a8d4da7125550957bb72cb59638e04f8892a18b584451c8d841d1c7cb0f0ab680cc323a3015776affaa3be33 + __ApkToolsDir="$(mktemp -d)" + __ApkKeysDir="$(mktemp -d)" + + wget "https://gitlab.alpinelinux.org/api/v4/projects/5/packages/generic//v$__ApkToolsVersion/x86_64/apk.static" -P "$__ApkToolsDir" + echo "$__ApkToolsSHA512SUM $__ApkToolsDir/apk.static" | sha512sum -c + chmod +x "$__ApkToolsDir/apk.static" + + if [[ -f "/usr/bin/qemu-$__QEMUArch-static" ]]; then + mkdir -p "$__RootfsDir"/usr/bin + cp -v "/usr/bin/qemu-$__QEMUArch-static" "$__RootfsDir/usr/bin" + fi + + if [[ "$__AlpineVersion" == "edge" ]]; then + version=edge + else + version="v$__AlpineVersion" + fi + + for line in $__AlpineKeys; do + id="${line%%:*}" + content="${line#*:}" + + echo -e "-----BEGIN PUBLIC KEY-----\n$content\n-----END PUBLIC KEY-----" > "$__ApkKeysDir/alpine-devel@lists.alpinelinux.org-$id.rsa.pub" + done + + if [[ "$__SkipSigCheck" == "1" ]]; then + __ApkSignatureArg="--allow-untrusted" + else + __ApkSignatureArg="--keys-dir $__ApkKeysDir" + fi + + # initialize DB + "$__ApkToolsDir/apk.static" \ + -X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \ + -X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \ + -U $__ApkSignatureArg --root "$__RootfsDir" --arch "$__AlpineArch" --initdb add + + if [[ "$__AlpineLlvmLibsLookup" == 
1 ]]; then + __AlpinePackages+=" $("$__ApkToolsDir/apk.static" \ + -X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \ + -X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \ + -U $__ApkSignatureArg --root "$__RootfsDir" --arch "$__AlpineArch" \ + search 'llvm*-libs' | sort | tail -1 | sed 's/-[^-]*//2g')" + fi + + # install all packages in one go + "$__ApkToolsDir/apk.static" \ + -X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \ + -X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \ + -U $__ApkSignatureArg --root "$__RootfsDir" --arch "$__AlpineArch" \ + add $__AlpinePackages + + rm -r "$__ApkToolsDir" +elif [[ "$__CodeName" == "freebsd" ]]; then + mkdir -p "$__RootfsDir"/usr/local/etc + JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"} + wget -O - "https://download.freebsd.org/ftp/releases/${__FreeBSDArch}/${__FreeBSDMachineArch}/${__FreeBSDBase}/base.txz" | tar -C "$__RootfsDir" -Jxf - ./lib ./usr/lib ./usr/libdata ./usr/include ./usr/share/keys ./etc ./bin/freebsd-version + echo "ABI = \"FreeBSD:${__FreeBSDABI}:${__FreeBSDMachineArch}\"; FINGERPRINTS = \"${__RootfsDir}/usr/share/keys\"; REPOS_DIR = [\"${__RootfsDir}/etc/pkg\"]; REPO_AUTOUPDATE = NO; RUN_SCRIPTS = NO;" > "${__RootfsDir}"/usr/local/etc/pkg.conf + echo "FreeBSD: { url: \"pkg+http://pkg.FreeBSD.org/\${ABI}/quarterly\", mirror_type: \"srv\", signature_type: \"fingerprints\", fingerprints: \"${__RootfsDir}/usr/share/keys/pkg\", enabled: yes }" > "${__RootfsDir}"/etc/pkg/FreeBSD.conf + mkdir -p "$__RootfsDir"/tmp + # get and build package manager + wget -O - "https://github.com/freebsd/pkg/archive/${__FreeBSDPkg}.tar.gz" | tar -C "$__RootfsDir"/tmp -zxf - + cd "$__RootfsDir/tmp/pkg-${__FreeBSDPkg}" + # needed for install to succeed + mkdir -p "$__RootfsDir"/host/etc + ./autogen.sh && ./configure --prefix="$__RootfsDir"/host && make -j "$JOBS" && make install + rm -rf "$__RootfsDir/tmp/pkg-${__FreeBSDPkg}" + # install packages we need. + INSTALL_AS_USER=$(whoami) "$__RootfsDir"/host/sbin/pkg -r "$__RootfsDir" -C "$__RootfsDir"/usr/local/etc/pkg.conf update + INSTALL_AS_USER=$(whoami) "$__RootfsDir"/host/sbin/pkg -r "$__RootfsDir" -C "$__RootfsDir"/usr/local/etc/pkg.conf install --yes $__FreeBSDPackages +elif [[ "$__CodeName" == "illumos" ]]; then + mkdir "$__RootfsDir/tmp" + pushd "$__RootfsDir/tmp" + JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"} + echo "Downloading sysroot." + wget -O - https://github.com/illumos/sysroot/releases/download/20181213-de6af22ae73b-v1/illumos-sysroot-i386-20181213-de6af22ae73b-v1.tar.gz | tar -C "$__RootfsDir" -xzf - + echo "Building binutils. Please wait.." + wget -O - https://ftp.gnu.org/gnu/binutils/binutils-2.33.1.tar.bz2 | tar -xjf - + mkdir build-binutils && cd build-binutils + ../binutils-2.33.1/configure --prefix="$__RootfsDir" --target="${__illumosArch}-sun-solaris2.10" --program-prefix="${__illumosArch}-illumos-" --with-sysroot="$__RootfsDir" + make -j "$JOBS" && make install && cd .. + echo "Building gcc. Please wait.." 
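    # The gcc build below mirrors the binutils build above: the same
    # --target="${__illumosArch}-sun-solaris2.10" triple, the same
    # --with-sysroot="$__RootfsDir", and the "${__illumosArch}-illumos-"
    # program prefix, so for an x64 rootfs the cross compiler is installed
    # as x86_64-illumos-gcc; the -fPIC flags exported below keep the target
    # runtime libraries linkable into shared objects.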
+ wget -O - https://ftp.gnu.org/gnu/gcc/gcc-8.4.0/gcc-8.4.0.tar.xz | tar -xJf - + CFLAGS="-fPIC" + CXXFLAGS="-fPIC" + CXXFLAGS_FOR_TARGET="-fPIC" + CFLAGS_FOR_TARGET="-fPIC" + export CFLAGS CXXFLAGS CXXFLAGS_FOR_TARGET CFLAGS_FOR_TARGET + mkdir build-gcc && cd build-gcc + ../gcc-8.4.0/configure --prefix="$__RootfsDir" --target="${__illumosArch}-sun-solaris2.10" --program-prefix="${__illumosArch}-illumos-" --with-sysroot="$__RootfsDir" --with-gnu-as \ + --with-gnu-ld --disable-nls --disable-libgomp --disable-libquadmath --disable-libssp --disable-libvtv --disable-libcilkrts --disable-libada --disable-libsanitizer \ + --disable-libquadmath-support --disable-shared --enable-tls + make -j "$JOBS" && make install && cd .. + BaseUrl=https://pkgsrc.smartos.org + if [[ "$__UseMirror" == 1 ]]; then + BaseUrl=https://pkgsrc.smartos.skylime.net + fi + BaseUrl="$BaseUrl/packages/SmartOS/trunk/${__illumosArch}/All" + echo "Downloading manifest" + wget "$BaseUrl" + echo "Downloading dependencies." + read -ra array <<<"$__IllumosPackages" + for package in "${array[@]}"; do + echo "Installing '$package'" + # find last occurrence of package in listing and extract its name + package="$(sed -En '/.*href="('"$package"'-[0-9].*).tgz".*/h;$!d;g;s//\1/p' All)" + echo "Resolved name '$package'" + wget "$BaseUrl"/"$package".tgz + ar -x "$package".tgz + tar --skip-old-files -xzf "$package".tmp.tg* -C "$__RootfsDir" 2>/dev/null + done + echo "Cleaning up temporary files." + popd + rm -rf "$__RootfsDir"/{tmp,+*} + mkdir -p "$__RootfsDir"/usr/include/net + mkdir -p "$__RootfsDir"/usr/include/netpacket + wget -P "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/bpf.h + wget -P "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/dlt.h + wget -P "$__RootfsDir"/usr/include/netpacket https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/inet/sockmods/netpacket/packet.h + wget -P "$__RootfsDir"/usr/include/sys https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/sys/sdt.h +elif [[ "$__CodeName" == "haiku" ]]; then + JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"} + + echo "Building Haiku sysroot for $__HaikuArch" + mkdir -p "$__RootfsDir/tmp" + pushd "$__RootfsDir/tmp" + + mkdir "$__RootfsDir/tmp/download" + + echo "Downloading Haiku package tool" + git clone https://github.com/haiku/haiku-toolchains-ubuntu --depth 1 $__RootfsDir/tmp/script + wget -O "$__RootfsDir/tmp/download/hosttools.zip" $($__RootfsDir/tmp/script/fetch.sh --hosttools) + unzip -o "$__RootfsDir/tmp/download/hosttools.zip" -d "$__RootfsDir/tmp/bin" + + DepotBaseUrl="https://depot.haiku-os.org/__api/v2/pkg/get-pkg" + HpkgBaseUrl="https://eu.hpkg.haiku-os.org/haiku/master/$__HaikuArch/current" + + # Download Haiku packages + echo "Downloading Haiku packages" + read -ra array <<<"$__HaikuPackages" + for package in "${array[@]}"; do + echo "Downloading $package..." 
+ # API documented here: https://github.com/haiku/haikudepotserver/blob/master/haikudepotserver-api2/src/main/resources/api2/pkg.yaml#L60 + # The schema here: https://github.com/haiku/haikudepotserver/blob/master/haikudepotserver-api2/src/main/resources/api2/pkg.yaml#L598 + hpkgDownloadUrl="$(wget -qO- --post-data='{"name":"'"$package"'","repositorySourceCode":"haikuports_'$__HaikuArch'","versionType":"LATEST","naturalLanguageCode":"en"}' \ + --header='Content-Type:application/json' "$DepotBaseUrl" | jq -r '.result.versions[].hpkgDownloadURL')" + wget -P "$__RootfsDir/tmp/download" "$hpkgDownloadUrl" + done + for package in haiku haiku_devel; do + echo "Downloading $package..." + hpkgVersion="$(wget -qO- $HpkgBaseUrl | sed -n 's/^.*version: "\([^"]*\)".*$/\1/p')" + wget -P "$__RootfsDir/tmp/download" "$HpkgBaseUrl/packages/$package-$hpkgVersion-1-$__HaikuArch.hpkg" + done + + # Set up the sysroot + echo "Setting up sysroot and extracting required packages" + mkdir -p "$__RootfsDir/boot/system" + for file in "$__RootfsDir/tmp/download/"*.hpkg; do + echo "Extracting $file..." + LD_LIBRARY_PATH="$__RootfsDir/tmp/bin" "$__RootfsDir/tmp/bin/package" extract -C "$__RootfsDir/boot/system" "$file" + done + + # Download buildtools + echo "Downloading Haiku buildtools" + wget -O "$__RootfsDir/tmp/download/buildtools.zip" $($__RootfsDir/tmp/script/fetch.sh --buildtools --arch=$__HaikuArch) + unzip -o "$__RootfsDir/tmp/download/buildtools.zip" -d "$__RootfsDir" + + # Cleaning up temporary files + echo "Cleaning up temporary files" + popd + rm -rf "$__RootfsDir/tmp" +elif [[ -n "$__CodeName" ]]; then + + if [[ "$__SkipSigCheck" == "0" ]]; then + __Keyring="$__Keyring --force-check-gpg" + fi + + debootstrap "--variant=minbase" $__Keyring --arch "$__UbuntuArch" "$__CodeName" "$__RootfsDir" "$__UbuntuRepo" + cp "$__CrossDir/$__BuildArch/sources.list.$__CodeName" "$__RootfsDir/etc/apt/sources.list" + chroot "$__RootfsDir" apt-get update + chroot "$__RootfsDir" apt-get -f -y install + chroot "$__RootfsDir" apt-get -y install $__UbuntuPackages + chroot "$__RootfsDir" symlinks -cr /usr + chroot "$__RootfsDir" apt-get clean + + if [[ "$__SkipUnmount" == "0" ]]; then + umount "$__RootfsDir"/* || true fi - if [[ "$__BuildArch" == "arm" && "$__LinuxCodeName" == "trusty" ]]; then - pushd $__RootfsDir - patch -p1 < $__CrossDir/$__BuildArch/trusty.patch - patch -p1 < $__CrossDir/$__BuildArch/trusty-lttng-2.4.patch + if [[ "$__BuildArch" == "armel" && "$__CodeName" == "jessie" ]]; then + pushd "$__RootfsDir" + patch -p1 < "$__CrossDir/$__BuildArch/armel.jessie.patch" popd fi -elif [ "$__Tizen" == "tizen" ]; then - ROOTFS_DIR=$__RootfsDir $__CrossDir/$__BuildArch/tizen-build-rootfs.sh +elif [[ "$__Tizen" == "tizen" ]]; then + ROOTFS_DIR="$__RootfsDir" "$__CrossDir/tizen-build-rootfs.sh" "$__BuildArch" else echo "Unsupported target platform." 
usage; diff --git a/eng/common/cross/ppc64le/sources.list.bionic b/eng/common/cross/ppc64le/sources.list.bionic new file mode 100644 index 000000000..210955740 --- /dev/null +++ b/eng/common/cross/ppc64le/sources.list.bionic @@ -0,0 +1,11 @@ +deb http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted +deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted + +deb http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse +deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse diff --git a/eng/common/cross/riscv64/sources.list.sid b/eng/common/cross/riscv64/sources.list.sid new file mode 100644 index 000000000..65f730d22 --- /dev/null +++ b/eng/common/cross/riscv64/sources.list.sid @@ -0,0 +1 @@ +deb http://deb.debian.org/debian-ports sid main diff --git a/eng/common/cross/s390x/sources.list.bionic b/eng/common/cross/s390x/sources.list.bionic new file mode 100644 index 000000000..210955740 --- /dev/null +++ b/eng/common/cross/s390x/sources.list.bionic @@ -0,0 +1,11 @@ +deb http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted +deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted + +deb http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse +deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse diff --git a/eng/common/cross/armel/tizen-build-rootfs.sh b/eng/common/cross/tizen-build-rootfs.sh similarity index 51% rename from eng/common/cross/armel/tizen-build-rootfs.sh rename to eng/common/cross/tizen-build-rootfs.sh index 87c48e78f..ac84173d4 100755 --- a/eng/common/cross/armel/tizen-build-rootfs.sh +++ b/eng/common/cross/tizen-build-rootfs.sh @@ -1,27 +1,46 @@ #!/usr/bin/env bash set -e -__ARM_SOFTFP_CrossDir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd ) -__TIZEN_CROSSDIR="$__ARM_SOFTFP_CrossDir/tizen" +ARCH=$1 +LINK_ARCH=$ARCH + +case "$ARCH" in + arm) + TIZEN_ARCH="armv7hl" + ;; + armel) + TIZEN_ARCH="armv7l" + LINK_ARCH="arm" + ;; + arm64) + TIZEN_ARCH="aarch64" + ;; + x86) + TIZEN_ARCH="i686" + ;; + x64) + TIZEN_ARCH="x86_64" + LINK_ARCH="x86" + ;; + *) + echo "Unsupported architecture for tizen: $ARCH" + exit 1 +esac + +__CrossDir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd ) +__TIZEN_CROSSDIR="$__CrossDir/${ARCH}/tizen" if [[ -z "$ROOTFS_DIR" ]]; then echo "ROOTFS_DIR is not defined." exit 1; fi -# Clean-up (TODO-Cleanup: We may already delete $ROOTFS_DIR at ./cross/build-rootfs.sh.) 
-# hk0110
-if [ -d "$ROOTFS_DIR" ]; then
-    umount $ROOTFS_DIR/*
-    rm -rf $ROOTFS_DIR
-fi
-
 TIZEN_TMP_DIR=$ROOTFS_DIR/tizen_tmp
 mkdir -p $TIZEN_TMP_DIR
 
 # Download files
 echo ">>Start downloading files"
-VERBOSE=1 $__ARM_SOFTFP_CrossDir/tizen-fetch.sh $TIZEN_TMP_DIR
+VERBOSE=1 $__CrossDir/tizen-fetch.sh $TIZEN_TMP_DIR $TIZEN_ARCH
 echo "<<Finish downloading files"
 
 echo ">>Start constructing Tizen rootfs"
@@ -37,8 +56,6 @@ rm -rf $TIZEN_TMP_DIR
 # Configure Tizen rootfs
 echo ">>Start configuring Tizen rootfs"
-rm ./usr/lib/libunwind.so
-ln -s libunwind.so.8 ./usr/lib/libunwind.so
-ln -sfn asm-arm ./usr/include/asm
+ln -sfn asm-${LINK_ARCH} ./usr/include/asm
 patch -p1 < $__TIZEN_CROSSDIR/tizen.patch
 echo "<<Finish configuring Tizen rootfs"
diff --git a/eng/common/cross/tizen-fetch.sh b/eng/common/cross/tizen-fetch.sh
new file mode 100755
--- /dev/null
+++ b/eng/common/cross/tizen-fetch.sh
@@ -0,0 +1,170 @@
+#!/usr/bin/env bash
+set -e
+
+if [[ -z "${VERBOSE// }" ]] || [ "$VERBOSE" -ne "$VERBOSE" ] 2>/dev/null; then
+    VERBOSE=0
+fi
+
+Log()
+{
+    if [ $VERBOSE -ge $1 ]; then
+        echo ${@:2}
+    fi
+}
+
+Inform()
+{
+    Log 1 -e "\x1B[0;34m$@\x1B[m"
+}
+
+Debug()
+{
+    Log 2 -e "\x1B[0;32m$@\x1B[m"
+}
+
+Error()
+{
+    >&2 Log 0 -e "\x1B[0;31m$@\x1B[m"
+}
+
+Fetch()
+{
+    URL=$1
+    FILE=$2
+    PROGRESS=$3
+    if [ $VERBOSE -ge 1 ] && [ $PROGRESS ]; then
+        CURL_OPT="--progress-bar"
+    else
+        CURL_OPT="--silent"
+    fi
+    curl $CURL_OPT $URL > $FILE
+}
+
+hash curl 2> /dev/null || { Error "Require 'curl' Aborting."; exit 1; }
+hash xmllint 2> /dev/null || { Error "Require 'xmllint' Aborting."; exit 1; }
+hash sha256sum 2> /dev/null || { Error "Require 'sha256sum' Aborting."; exit 1; }
+
+TMPDIR=$1
+if [ ! -d $TMPDIR ]; then
+    TMPDIR=./tizen_tmp
+    Debug "Create temporary directory : $TMPDIR"
+    mkdir -p $TMPDIR
+fi
+
+TIZEN_ARCH=$2
+
+TIZEN_URL=http://download.tizen.org/snapshots/TIZEN/Tizen
+BUILD_XML=build.xml
+REPOMD_XML=repomd.xml
+PRIMARY_XML=primary.xml
+TARGET_URL="http://__not_initialized"
+
+Xpath_get()
+{
+    XPATH_RESULT=''
+    XPATH=$1
+    XML_FILE=$2
+    RESULT=$(xmllint --xpath $XPATH $XML_FILE)
+    if [[ -z ${RESULT// } ]]; then
+        Error "Can not find target from $XML_FILE"
+        Debug "Xpath = $XPATH"
+        exit 1
+    fi
+    XPATH_RESULT=$RESULT
+}
+
+fetch_tizen_pkgs_init()
+{
+    TARGET=$1
+    PROFILE=$2
+    Debug "Initialize TARGET=$TARGET, PROFILE=$PROFILE"
+
+    TMP_PKG_DIR=$TMPDIR/tizen_${PROFILE}_pkgs
+    if [ -d $TMP_PKG_DIR ]; then rm -rf $TMP_PKG_DIR; fi
+    mkdir -p $TMP_PKG_DIR
+
+    PKG_URL=$TIZEN_URL/$PROFILE/latest
+
+    BUILD_XML_URL=$PKG_URL/$BUILD_XML
+    TMP_BUILD=$TMP_PKG_DIR/$BUILD_XML
+    TMP_REPOMD=$TMP_PKG_DIR/$REPOMD_XML
+    TMP_PRIMARY=$TMP_PKG_DIR/$PRIMARY_XML
+    TMP_PRIMARYGZ=${TMP_PRIMARY}.gz
+
+    Fetch $BUILD_XML_URL $TMP_BUILD
+
+    Debug "fetch $BUILD_XML_URL to $TMP_BUILD"
+
+    TARGET_XPATH="//build/buildtargets/buildtarget[@name=\"$TARGET\"]/repo[@type=\"binary\"]/text()"
+    Xpath_get $TARGET_XPATH $TMP_BUILD
+    TARGET_PATH=$XPATH_RESULT
+    TARGET_URL=$PKG_URL/$TARGET_PATH
+
+    REPOMD_URL=$TARGET_URL/repodata/repomd.xml
+    PRIMARY_XPATH='string(//*[local-name()="data"][@type="primary"]/*[local-name()="location"]/@href)'
+
+    Fetch $REPOMD_URL $TMP_REPOMD
+
+    Debug "fetch $REPOMD_URL to $TMP_REPOMD"
+
+    Xpath_get $PRIMARY_XPATH $TMP_REPOMD
+    PRIMARY_XML_PATH=$XPATH_RESULT
+    PRIMARY_URL=$TARGET_URL/$PRIMARY_XML_PATH
+
+    Fetch $PRIMARY_URL $TMP_PRIMARYGZ
+
+    Debug "fetch $PRIMARY_URL to $TMP_PRIMARYGZ"
+
+    gunzip $TMP_PRIMARYGZ
+
+    Debug "unzip $TMP_PRIMARYGZ to $TMP_PRIMARY"
+}
+
+fetch_tizen_pkgs()
+{
+    ARCH=$1
+    PACKAGE_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="location"]/@href)'
+
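    # The location template above and the checksum template below share the
    # same package/arch predicates; their _PKG_ and _ARCH_ markers are filled
    # in further down via bash pattern substitution, e.g. (hypothetical
    # values):
    #   XPATH=${PACKAGE_XPATH_TPL/_PKG_/glibc}
    #   XPATH=${XPATH/_ARCH_/armv7l}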
PACKAGE_CHECKSUM_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="checksum"]/text())' + + for pkg in ${@:2} + do + Inform "Fetching... $pkg" + XPATH=${PACKAGE_XPATH_TPL/_PKG_/$pkg} + XPATH=${XPATH/_ARCH_/$ARCH} + Xpath_get $XPATH $TMP_PRIMARY + PKG_PATH=$XPATH_RESULT + + XPATH=${PACKAGE_CHECKSUM_XPATH_TPL/_PKG_/$pkg} + XPATH=${XPATH/_ARCH_/$ARCH} + Xpath_get $XPATH $TMP_PRIMARY + CHECKSUM=$XPATH_RESULT + + PKG_URL=$TARGET_URL/$PKG_PATH + PKG_FILE=$(basename $PKG_PATH) + PKG_PATH=$TMPDIR/$PKG_FILE + + Debug "Download $PKG_URL to $PKG_PATH" + Fetch $PKG_URL $PKG_PATH true + + echo "$CHECKSUM $PKG_PATH" | sha256sum -c - > /dev/null + if [ $? -ne 0 ]; then + Error "Fail to fetch $PKG_URL to $PKG_PATH" + Debug "Checksum = $CHECKSUM" + exit 1 + fi + done +} + +Inform "Initialize ${TIZEN_ARCH} base" +fetch_tizen_pkgs_init standard Tizen-Base +Inform "fetch common packages" +fetch_tizen_pkgs ${TIZEN_ARCH} gcc gcc-devel-static glibc glibc-devel libicu libicu-devel libatomic linux-glibc-devel keyutils keyutils-devel libkeyutils +Inform "fetch coreclr packages" +fetch_tizen_pkgs ${TIZEN_ARCH} lldb lldb-devel libgcc libstdc++ libstdc++-devel libunwind libunwind-devel lttng-ust-devel lttng-ust userspace-rcu-devel userspace-rcu +Inform "fetch corefx packages" +fetch_tizen_pkgs ${TIZEN_ARCH} libcom_err libcom_err-devel zlib zlib-devel libopenssl11 libopenssl1.1-devel krb5 krb5-devel + +Inform "Initialize standard unified" +fetch_tizen_pkgs_init standard Tizen-Unified +Inform "fetch corefx packages" +fetch_tizen_pkgs ${TIZEN_ARCH} gssdp gssdp-devel tizen-release + diff --git a/eng/common/cross/toolchain.cmake b/eng/common/cross/toolchain.cmake index 071d41124..dafabdcae 100644 --- a/eng/common/cross/toolchain.cmake +++ b/eng/common/cross/toolchain.cmake @@ -1,88 +1,308 @@ set(CROSS_ROOTFS $ENV{ROOTFS_DIR}) +# reset platform variables (e.g. 
cmake 3.25 sets LINUX=1) +unset(LINUX) +unset(FREEBSD) +unset(ILLUMOS) +unset(ANDROID) +unset(TIZEN) +unset(HAIKU) + set(TARGET_ARCH_NAME $ENV{TARGET_BUILD_ARCH}) -set(CMAKE_SYSTEM_NAME Linux) +if(EXISTS ${CROSS_ROOTFS}/bin/freebsd-version) + set(CMAKE_SYSTEM_NAME FreeBSD) + set(FREEBSD 1) +elseif(EXISTS ${CROSS_ROOTFS}/usr/platform/i86pc) + set(CMAKE_SYSTEM_NAME SunOS) + set(ILLUMOS 1) +elseif(EXISTS ${CROSS_ROOTFS}/boot/system/develop/headers/config/HaikuConfig.h) + set(CMAKE_SYSTEM_NAME Haiku) + set(HAIKU 1) +else() + set(CMAKE_SYSTEM_NAME Linux) + set(LINUX 1) +endif() set(CMAKE_SYSTEM_VERSION 1) -if(TARGET_ARCH_NAME STREQUAL "armel") - set(CMAKE_SYSTEM_PROCESSOR armv7l) - set(TOOLCHAIN "arm-linux-gnueabi") - if("$ENV{__DistroRid}" MATCHES "tizen.*") - set(TIZEN_TOOLCHAIN "armv7l-tizen-linux-gnueabi/6.2.1") - endif() -elseif(TARGET_ARCH_NAME STREQUAL "arm") +if(EXISTS ${CROSS_ROOTFS}/etc/tizen-release) + set(TIZEN 1) +elseif(EXISTS ${CROSS_ROOTFS}/android_platform) + set(ANDROID 1) +endif() + +if(TARGET_ARCH_NAME STREQUAL "arm") set(CMAKE_SYSTEM_PROCESSOR armv7l) - if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/armv6-alpine-linux-musleabihf) + if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/armv7-alpine-linux-musleabihf) + set(TOOLCHAIN "armv7-alpine-linux-musleabihf") + elseif(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/armv6-alpine-linux-musleabihf) set(TOOLCHAIN "armv6-alpine-linux-musleabihf") else() set(TOOLCHAIN "arm-linux-gnueabihf") endif() + if(TIZEN) + set(TIZEN_TOOLCHAIN "armv7hl-tizen-linux-gnueabihf") + endif() elseif(TARGET_ARCH_NAME STREQUAL "arm64") set(CMAKE_SYSTEM_PROCESSOR aarch64) if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/aarch64-alpine-linux-musl) set(TOOLCHAIN "aarch64-alpine-linux-musl") - else() + elseif(LINUX) set(TOOLCHAIN "aarch64-linux-gnu") + if(TIZEN) + set(TIZEN_TOOLCHAIN "aarch64-tizen-linux-gnu") + endif() + elseif(FREEBSD) + set(triple "aarch64-unknown-freebsd12") + endif() +elseif(TARGET_ARCH_NAME STREQUAL "armel") + set(CMAKE_SYSTEM_PROCESSOR armv7l) + set(TOOLCHAIN "arm-linux-gnueabi") + if(TIZEN) + set(TIZEN_TOOLCHAIN "armv7l-tizen-linux-gnueabi") + endif() +elseif(TARGET_ARCH_NAME STREQUAL "armv6") + set(CMAKE_SYSTEM_PROCESSOR armv6l) + if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/armv6-alpine-linux-musleabihf) + set(TOOLCHAIN "armv6-alpine-linux-musleabihf") + else() + set(TOOLCHAIN "arm-linux-gnueabihf") + endif() +elseif(TARGET_ARCH_NAME STREQUAL "ppc64le") + set(CMAKE_SYSTEM_PROCESSOR ppc64le) + if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/powerpc64le-alpine-linux-musl) + set(TOOLCHAIN "powerpc64le-alpine-linux-musl") + else() + set(TOOLCHAIN "powerpc64le-linux-gnu") + endif() +elseif(TARGET_ARCH_NAME STREQUAL "riscv64") + set(CMAKE_SYSTEM_PROCESSOR riscv64) + if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/riscv64-alpine-linux-musl) + set(TOOLCHAIN "riscv64-alpine-linux-musl") + else() + set(TOOLCHAIN "riscv64-linux-gnu") + endif() +elseif(TARGET_ARCH_NAME STREQUAL "s390x") + set(CMAKE_SYSTEM_PROCESSOR s390x) + if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/s390x-alpine-linux-musl) + set(TOOLCHAIN "s390x-alpine-linux-musl") + else() + set(TOOLCHAIN "s390x-linux-gnu") + endif() +elseif(TARGET_ARCH_NAME STREQUAL "x64") + set(CMAKE_SYSTEM_PROCESSOR x86_64) + if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/x86_64-alpine-linux-musl) + set(TOOLCHAIN "x86_64-alpine-linux-musl") + elseif(LINUX) + set(TOOLCHAIN "x86_64-linux-gnu") + if(TIZEN) + set(TIZEN_TOOLCHAIN "x86_64-tizen-linux-gnu") + endif() + elseif(FREEBSD) + set(triple "x86_64-unknown-freebsd12") + elseif(ILLUMOS) + set(TOOLCHAIN "x86_64-illumos") + elseif(HAIKU) 
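# NOTE: every branch of this chain maps TARGET_BUILD_ARCH to a target triple
# that later becomes the prefix used to locate the cross binutils in the
# rootfs and, on most branches, the clang --target value. A typical cross
# invocation of this toolchain file (rootfs path illustrative only) is:
#
#   ROOTFS_DIR=/crossrootfs/arm64 TARGET_BUILD_ARCH=arm64 \
#     cmake -S . -B out -DCMAKE_TOOLCHAIN_FILE=eng/common/cross/toolchain.cmake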
+ set(TOOLCHAIN "x86_64-unknown-haiku") endif() elseif(TARGET_ARCH_NAME STREQUAL "x86") set(CMAKE_SYSTEM_PROCESSOR i686) - set(TOOLCHAIN "i686-linux-gnu") + if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/i586-alpine-linux-musl) + set(TOOLCHAIN "i586-alpine-linux-musl") + else() + set(TOOLCHAIN "i686-linux-gnu") + endif() + if(TIZEN) + set(TIZEN_TOOLCHAIN "i586-tizen-linux-gnu") + endif() else() - message(FATAL_ERROR "Arch is ${TARGET_ARCH_NAME}. Only armel, arm, arm64 and x86 are supported!") + message(FATAL_ERROR "Arch is ${TARGET_ARCH_NAME}. Only arm, arm64, armel, armv6, ppc64le, riscv64, s390x, x64 and x86 are supported!") +endif() + +if(DEFINED ENV{TOOLCHAIN}) + set(TOOLCHAIN $ENV{TOOLCHAIN}) endif() # Specify include paths -if(TARGET_ARCH_NAME STREQUAL "armel") - if(DEFINED TIZEN_TOOLCHAIN) - include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/) - include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/armv7l-tizen-linux-gnueabi) +if(TIZEN) + function(find_toolchain_dir prefix) + # Dynamically find the version subdirectory + file(GLOB DIRECTORIES "${prefix}/*") + list(GET DIRECTORIES 0 FIRST_MATCH) + get_filename_component(TOOLCHAIN_VERSION ${FIRST_MATCH} NAME) + + set(TIZEN_TOOLCHAIN_PATH "${prefix}/${TOOLCHAIN_VERSION}" PARENT_SCOPE) + endfunction() + + if(TARGET_ARCH_NAME MATCHES "^(arm|armel|x86)$") + find_toolchain_dir("${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}") + else() + find_toolchain_dir("${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}") endif() + + message(STATUS "TIZEN_TOOLCHAIN_PATH set to: ${TIZEN_TOOLCHAIN_PATH}") + + include_directories(SYSTEM ${TIZEN_TOOLCHAIN_PATH}/include/c++) + include_directories(SYSTEM ${TIZEN_TOOLCHAIN_PATH}/include/c++/${TIZEN_TOOLCHAIN}) endif() -# add_compile_param - adds only new options without duplicates. -# arg0 - list with result options, arg1 - list with new options. -# arg2 - optional argument, quick summary string for optional using CACHE FORCE mode. -macro(add_compile_param) - if(NOT ${ARGC} MATCHES "^(2|3)$") - message(FATAL_ERROR "Wrong using add_compile_param! Two or three parameters must be given! 
See add_compile_param description.") - endif() - foreach(OPTION ${ARGV1}) - if(NOT ${ARGV0} MATCHES "${OPTION}($| )") - set(${ARGV0} "${${ARGV0}} ${OPTION}") - if(${ARGC} EQUAL "3") # CACHE FORCE mode - set(${ARGV0} "${${ARGV0}}" CACHE STRING "${ARGV2}" FORCE) - endif() +if(ANDROID) + if(TARGET_ARCH_NAME STREQUAL "arm") + set(ANDROID_ABI armeabi-v7a) + elseif(TARGET_ARCH_NAME STREQUAL "arm64") + set(ANDROID_ABI arm64-v8a) endif() - endforeach() -endmacro() + + # extract platform number required by the NDK's toolchain + file(READ "${CROSS_ROOTFS}/android_platform" RID_FILE_CONTENTS) + string(REPLACE "RID=" "" ANDROID_RID "${RID_FILE_CONTENTS}") + string(REGEX REPLACE ".*\\.([0-9]+)-.*" "\\1" ANDROID_PLATFORM "${ANDROID_RID}") + + set(ANDROID_TOOLCHAIN clang) + set(FEATURE_EVENT_TRACE 0) # disable event trace as there is no lttng-ust package in termux repository + set(CMAKE_SYSTEM_LIBRARY_PATH "${CROSS_ROOTFS}/usr/lib") + set(CMAKE_SYSTEM_INCLUDE_PATH "${CROSS_ROOTFS}/usr/include") + + # include official NDK toolchain script + include(${CROSS_ROOTFS}/../build/cmake/android.toolchain.cmake) +elseif(FREEBSD) + # we cross-compile by instructing clang + set(CMAKE_C_COMPILER_TARGET ${triple}) + set(CMAKE_CXX_COMPILER_TARGET ${triple}) + set(CMAKE_ASM_COMPILER_TARGET ${triple}) + set(CMAKE_SYSROOT "${CROSS_ROOTFS}") + set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fuse-ld=lld") + set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -fuse-ld=lld") + set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} -fuse-ld=lld") +elseif(ILLUMOS) + set(CMAKE_SYSROOT "${CROSS_ROOTFS}") + + include_directories(SYSTEM ${CROSS_ROOTFS}/include) + + set(TOOLSET_PREFIX ${TOOLCHAIN}-) + function(locate_toolchain_exec exec var) + string(TOUPPER ${exec} EXEC_UPPERCASE) + if(NOT "$ENV{CLR_${EXEC_UPPERCASE}}" STREQUAL "") + set(${var} "$ENV{CLR_${EXEC_UPPERCASE}}" PARENT_SCOPE) + return() + endif() + + find_program(EXEC_LOCATION_${exec} + NAMES + "${TOOLSET_PREFIX}${exec}${CLR_CMAKE_COMPILER_FILE_NAME_VERSION}" + "${TOOLSET_PREFIX}${exec}") + + if (EXEC_LOCATION_${exec} STREQUAL "EXEC_LOCATION_${exec}-NOTFOUND") + message(FATAL_ERROR "Unable to find toolchain executable. Name: ${exec}, Prefix: ${TOOLSET_PREFIX}.") + endif() + set(${var} ${EXEC_LOCATION_${exec}} PARENT_SCOPE) + endfunction() + + set(CMAKE_SYSTEM_PREFIX_PATH "${CROSS_ROOTFS}") + + locate_toolchain_exec(gcc CMAKE_C_COMPILER) + locate_toolchain_exec(g++ CMAKE_CXX_COMPILER) + + set(CMAKE_C_STANDARD_LIBRARIES "${CMAKE_C_STANDARD_LIBRARIES} -lssp") + set(CMAKE_CXX_STANDARD_LIBRARIES "${CMAKE_CXX_STANDARD_LIBRARIES} -lssp") +elseif(HAIKU) + set(CMAKE_SYSROOT "${CROSS_ROOTFS}") + set(CMAKE_PROGRAM_PATH "${CMAKE_PROGRAM_PATH};${CROSS_ROOTFS}/cross-tools-x86_64/bin") + + set(TOOLSET_PREFIX ${TOOLCHAIN}-) + function(locate_toolchain_exec exec var) + string(TOUPPER ${exec} EXEC_UPPERCASE) + if(NOT "$ENV{CLR_${EXEC_UPPERCASE}}" STREQUAL "") + set(${var} "$ENV{CLR_${EXEC_UPPERCASE}}" PARENT_SCOPE) + return() + endif() + + find_program(EXEC_LOCATION_${exec} + NAMES + "${TOOLSET_PREFIX}${exec}${CLR_CMAKE_COMPILER_FILE_NAME_VERSION}" + "${TOOLSET_PREFIX}${exec}") + + if (EXEC_LOCATION_${exec} STREQUAL "EXEC_LOCATION_${exec}-NOTFOUND") + message(FATAL_ERROR "Unable to find toolchain executable. 
Name: ${exec}, Prefix: ${TOOLSET_PREFIX}.") + endif() + set(${var} ${EXEC_LOCATION_${exec}} PARENT_SCOPE) + endfunction() + + set(CMAKE_SYSTEM_PREFIX_PATH "${CROSS_ROOTFS}") + + locate_toolchain_exec(gcc CMAKE_C_COMPILER) + locate_toolchain_exec(g++ CMAKE_CXX_COMPILER) + + set(CMAKE_C_STANDARD_LIBRARIES "${CMAKE_C_STANDARD_LIBRARIES} -lssp") + set(CMAKE_CXX_STANDARD_LIBRARIES "${CMAKE_CXX_STANDARD_LIBRARIES} -lssp") + + # let CMake set up the correct search paths + include(Platform/Haiku) +else() + set(CMAKE_SYSROOT "${CROSS_ROOTFS}") + + set(CMAKE_C_COMPILER_EXTERNAL_TOOLCHAIN "${CROSS_ROOTFS}/usr") + set(CMAKE_CXX_COMPILER_EXTERNAL_TOOLCHAIN "${CROSS_ROOTFS}/usr") + set(CMAKE_ASM_COMPILER_EXTERNAL_TOOLCHAIN "${CROSS_ROOTFS}/usr") +endif() # Specify link flags -add_compile_param(CROSS_LINK_FLAGS "--sysroot=${CROSS_ROOTFS}") -add_compile_param(CROSS_LINK_FLAGS "--gcc-toolchain=${CROSS_ROOTFS}/usr") -add_compile_param(CROSS_LINK_FLAGS "--target=${TOOLCHAIN}") -add_compile_param(CROSS_LINK_FLAGS "-fuse-ld=gold") - -if(TARGET_ARCH_NAME STREQUAL "armel") - if(DEFINED TIZEN_TOOLCHAIN) # For Tizen only - add_compile_param(CROSS_LINK_FLAGS "-B${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}") - add_compile_param(CROSS_LINK_FLAGS "-L${CROSS_ROOTFS}/lib") - add_compile_param(CROSS_LINK_FLAGS "-L${CROSS_ROOTFS}/usr/lib") - add_compile_param(CROSS_LINK_FLAGS "-L${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}") + +function(add_toolchain_linker_flag Flag) + set(Config "${ARGV1}") + set(CONFIG_SUFFIX "") + if (NOT Config STREQUAL "") + set(CONFIG_SUFFIX "_${Config}") endif() -elseif(TARGET_ARCH_NAME STREQUAL "x86") - add_compile_param(CROSS_LINK_FLAGS "-m32") + set("CMAKE_EXE_LINKER_FLAGS${CONFIG_SUFFIX}_INIT" "${CMAKE_EXE_LINKER_FLAGS${CONFIG_SUFFIX}_INIT} ${Flag}" PARENT_SCOPE) + set("CMAKE_SHARED_LINKER_FLAGS${CONFIG_SUFFIX}_INIT" "${CMAKE_SHARED_LINKER_FLAGS${CONFIG_SUFFIX}_INIT} ${Flag}" PARENT_SCOPE) +endfunction() + +if(LINUX) + add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/lib/${TOOLCHAIN}") + add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/usr/lib/${TOOLCHAIN}") endif() -add_compile_param(CMAKE_EXE_LINKER_FLAGS "${CROSS_LINK_FLAGS}" "TOOLCHAIN_EXE_LINKER_FLAGS") -add_compile_param(CMAKE_SHARED_LINKER_FLAGS "${CROSS_LINK_FLAGS}" "TOOLCHAIN_EXE_LINKER_FLAGS") -add_compile_param(CMAKE_MODULE_LINKER_FLAGS "${CROSS_LINK_FLAGS}" "TOOLCHAIN_EXE_LINKER_FLAGS") +if(TARGET_ARCH_NAME MATCHES "^(arm|armel)$") + if(TIZEN) + add_toolchain_linker_flag("-B${TIZEN_TOOLCHAIN_PATH}") + add_toolchain_linker_flag("-L${CROSS_ROOTFS}/lib") + add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib") + add_toolchain_linker_flag("-L${TIZEN_TOOLCHAIN_PATH}") + endif() +elseif(TARGET_ARCH_NAME MATCHES "^(arm64|x64)$") + if(TIZEN) + add_toolchain_linker_flag("-B${TIZEN_TOOLCHAIN_PATH}") + add_toolchain_linker_flag("-L${CROSS_ROOTFS}/lib64") + add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib64") + add_toolchain_linker_flag("-L${TIZEN_TOOLCHAIN_PATH}") + + add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/lib64") + add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/usr/lib64") + add_toolchain_linker_flag("-Wl,--rpath-link=${TIZEN_TOOLCHAIN_PATH}") + endif() +elseif(TARGET_ARCH_NAME STREQUAL "x86") + if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/i586-alpine-linux-musl) + add_toolchain_linker_flag("--target=${TOOLCHAIN}") + add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/usr/lib/gcc/${TOOLCHAIN}") + endif() + add_toolchain_linker_flag(-m32) + if(TIZEN) + 
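
# NOTE: add_toolchain_linker_flag (defined earlier in this file) appends to
# the CMAKE_{EXE,SHARED}_LINKER_FLAGS<CONFIG>_INIT variables, which CMake
# folds into the corresponding cache entries on the first configure instead
# of overwriting flags the user already set. The optional second argument
# scopes a flag to one build configuration; both forms, with illustrative
# flags:
#
#   add_toolchain_linker_flag("-Wl,--as-needed")    # all configurations
#   add_toolchain_linker_flag("-Wl,-O1" DEBUG)      # Debug only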
add_toolchain_linker_flag("-B${TIZEN_TOOLCHAIN_PATH}") + add_toolchain_linker_flag("-L${CROSS_ROOTFS}/lib") + add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib") + add_toolchain_linker_flag("-L${TIZEN_TOOLCHAIN_PATH}") + endif() +elseif(ILLUMOS) + add_toolchain_linker_flag("-L${CROSS_ROOTFS}/lib/amd64") + add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/amd64/lib") +elseif(HAIKU) + add_toolchain_linker_flag("-lnetwork") + add_toolchain_linker_flag("-lroot") +endif() # Specify compile options -add_compile_options("--sysroot=${CROSS_ROOTFS}") -add_compile_options("--target=${TOOLCHAIN}") -add_compile_options("--gcc-toolchain=${CROSS_ROOTFS}/usr") -if(TARGET_ARCH_NAME MATCHES "^(arm|armel|arm64)$") +if((TARGET_ARCH_NAME MATCHES "^(arm|arm64|armel|armv6|ppc64le|riscv64|s390x|x64|x86)$" AND NOT ANDROID AND NOT FREEBSD) OR ILLUMOS OR HAIKU) set(CMAKE_C_COMPILER_TARGET ${TOOLCHAIN}) set(CMAKE_CXX_COMPILER_TARGET ${TOOLCHAIN}) set(CMAKE_ASM_COMPILER_TARGET ${TOOLCHAIN}) @@ -90,20 +310,39 @@ endif() if(TARGET_ARCH_NAME MATCHES "^(arm|armel)$") add_compile_options(-mthumb) - add_compile_options(-mfpu=vfpv3) + if (NOT DEFINED CLR_ARM_FPU_TYPE) + set (CLR_ARM_FPU_TYPE vfpv3) + endif (NOT DEFINED CLR_ARM_FPU_TYPE) + + add_compile_options (-mfpu=${CLR_ARM_FPU_TYPE}) + if (NOT DEFINED CLR_ARM_FPU_CAPABILITY) + set (CLR_ARM_FPU_CAPABILITY 0x7) + endif (NOT DEFINED CLR_ARM_FPU_CAPABILITY) + + add_definitions (-DCLR_ARM_FPU_CAPABILITY=${CLR_ARM_FPU_CAPABILITY}) + + # persist variables across multiple try_compile passes + list(APPEND CMAKE_TRY_COMPILE_PLATFORM_VARIABLES CLR_ARM_FPU_TYPE CLR_ARM_FPU_CAPABILITY) + if(TARGET_ARCH_NAME STREQUAL "armel") add_compile_options(-mfloat-abi=softfp) - if(DEFINED TIZEN_TOOLCHAIN) - add_compile_options(-Wno-deprecated-declarations) # compile-time option - add_compile_options(-D__extern_always_inline=inline) # compile-time option - endif() endif() elseif(TARGET_ARCH_NAME STREQUAL "x86") + if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/i586-alpine-linux-musl) + add_compile_options(--target=${TOOLCHAIN}) + endif() add_compile_options(-m32) add_compile_options(-Wno-error=unused-command-line-argument) endif() -# Set LLDB include and library paths +if(TIZEN) + if(TARGET_ARCH_NAME MATCHES "^(arm|armel|arm64|x86)$") + add_compile_options(-Wno-deprecated-declarations) # compile-time option + add_compile_options(-D__extern_always_inline=inline) # compile-time option + endif() +endif() + +# Set LLDB include and library paths for builds that need lldb. 
if(TARGET_ARCH_NAME MATCHES "^(arm|armel|x86)$") if(TARGET_ARCH_NAME STREQUAL "x86") set(LLVM_CROSS_DIR "$ENV{LLVM_CROSS_HOME}") @@ -131,7 +370,6 @@ if(TARGET_ARCH_NAME MATCHES "^(arm|armel|x86)$") endif() endif() -set(CMAKE_FIND_ROOT_PATH "${CROSS_ROOTFS}") set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER) set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY) set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY) diff --git a/eng/common/darc-init.ps1 b/eng/common/darc-init.ps1 index 435e76413..8fda30bdc 100644 --- a/eng/common/darc-init.ps1 +++ b/eng/common/darc-init.ps1 @@ -1,6 +1,6 @@ param ( $darcVersion = $null, - $versionEndpoint = 'https://maestro-prod.westus2.cloudapp.azure.com/api/assets/darc-version?api-version=2019-01-16', + $versionEndpoint = 'https://maestro.dot.net/api/assets/darc-version?api-version=2019-01-16', $verbosity = 'minimal', $toolpath = $null ) diff --git a/eng/common/darc-init.sh b/eng/common/darc-init.sh index 06b653425..c305ae6bd 100755 --- a/eng/common/darc-init.sh +++ b/eng/common/darc-init.sh @@ -2,11 +2,11 @@ source="${BASH_SOURCE[0]}" darcVersion='' -versionEndpoint='https://maestro-prod.westus2.cloudapp.azure.com/api/assets/darc-version?api-version=2019-01-16' +versionEndpoint='https://maestro.dot.net/api/assets/darc-version?api-version=2019-01-16' verbosity='minimal' while [[ $# > 0 ]]; do - opt="$(echo "$1" | awk '{print tolower($0)}')" + opt="$(echo "$1" | tr "[:upper:]" "[:lower:]")" case "$opt" in --darcversion) darcVersion=$2 @@ -53,7 +53,7 @@ fi function InstallDarcCli { local darc_cli_package_name="microsoft.dotnet.darc" - InitializeDotNetCli + InitializeDotNetCli true local dotnet_root=$_InitializeDotNetCli if [ -z "$toolpath" ]; then @@ -68,7 +68,7 @@ function InstallDarcCli { fi fi - local arcadeServicesSource="https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json" + local arcadeServicesSource="https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json" echo "Installing Darc CLI version $darcVersion..." echo "You may need to restart your command shell if this is the first dotnet tool you have installed." diff --git a/eng/common/dotnet-install.ps1 b/eng/common/dotnet-install.ps1 index ec3e739fe..811f0f717 100644 --- a/eng/common/dotnet-install.ps1 +++ b/eng/common/dotnet-install.ps1 @@ -1,28 +1,27 @@ [CmdletBinding(PositionalBinding=$false)] Param( - [string] $verbosity = "minimal", - [string] $architecture = "", - [string] $version = "Latest", - [string] $runtime = "dotnet", - [string] $RuntimeSourceFeed = "", - [string] $RuntimeSourceFeedKey = "" + [string] $verbosity = 'minimal', + [string] $architecture = '', + [string] $version = 'Latest', + [string] $runtime = 'dotnet', + [string] $RuntimeSourceFeed = '', + [string] $RuntimeSourceFeedKey = '' ) . 
$PSScriptRoot\tools.ps1 -$dotnetRoot = Join-Path $RepoRoot ".dotnet" +$dotnetRoot = Join-Path $RepoRoot '.dotnet' $installdir = $dotnetRoot try { - if ($architecture -and $architecture.Trim() -eq "x86") { - $installdir = Join-Path $installdir "x86" + if ($architecture -and $architecture.Trim() -eq 'x86') { + $installdir = Join-Path $installdir 'x86' } - InstallDotNet $installdir $version $architecture $runtime $true -RuntimeSourceFeed $RuntimeSourceFeed -RuntimeSourceFeedKey $RuntimeSourceFeedKey -} + InstallDotNet $installdir $version $architecture $runtime $true -RuntimeSourceFeed $RuntimeSourceFeed -RuntimeSourceFeedKey $RuntimeSourceFeedKey +} catch { - Write-Host $_ - Write-Host $_.Exception Write-Host $_.ScriptStackTrace + Write-PipelineTelemetryError -Category 'InitializeToolset' -Message $_ ExitWithExitCode 1 } diff --git a/eng/common/dotnet-install.sh b/eng/common/dotnet-install.sh index d259a274c..7e69e3a9e 100755 --- a/eng/common/dotnet-install.sh +++ b/eng/common/dotnet-install.sh @@ -11,13 +11,15 @@ while [[ -h "$source" ]]; do done scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" +. "$scriptroot/tools.sh" + version='Latest' architecture='' runtime='dotnet' runtimeSourceFeed='' runtimeSourceFeedKey='' while [[ $# > 0 ]]; do - opt="$(echo "$1" | awk '{print tolower($0)}')" + opt="$(echo "$1" | tr "[:upper:]" "[:lower:]")" case "$opt" in -version|-v) shift @@ -40,18 +42,49 @@ while [[ $# > 0 ]]; do runtimeSourceFeedKey="$1" ;; *) - echo "Invalid argument: $1" + Write-PipelineTelemetryError -Category 'Build' -Message "Invalid argument: $1" exit 1 ;; esac shift done -. "$scriptroot/tools.sh" -dotnetRoot="$repo_root/.dotnet" +# Use uname to determine what the CPU is, see https://en.wikipedia.org/wiki/Uname#Examples +cpuname=$(uname -m) +case $cpuname in + arm64|aarch64) + buildarch=arm64 + if [ "$(getconf LONG_BIT)" -lt 64 ]; then + # This is 32-bit OS running on 64-bit CPU (for example Raspberry Pi OS) + buildarch=arm + fi + ;; + loongarch64) + buildarch=loongarch64 + ;; + amd64|x86_64) + buildarch=x64 + ;; + armv*l) + buildarch=arm + ;; + i[3-6]86) + buildarch=x86 + ;; + *) + echo "Unknown CPU $cpuname detected, treating it as x64" + buildarch=x64 + ;; +esac + +dotnetRoot="${repo_root}.dotnet" +if [[ $architecture != "" ]] && [[ $architecture != $buildarch ]]; then + dotnetRoot="$dotnetRoot/$architecture" +fi + InstallDotNet $dotnetRoot $version "$architecture" $runtime true $runtimeSourceFeed $runtimeSourceFeedKey || { local exit_code=$? - echo "dotnet-install.sh failed (exit code '$exit_code')." >&2 + Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "dotnet-install.sh failed (exit code '$exit_code')." >&2 ExitWithExitCode $exit_code } diff --git a/eng/common/enable-cross-org-publishing.ps1 b/eng/common/enable-cross-org-publishing.ps1 index eccbf9f1b..da09da4f1 100644 --- a/eng/common/enable-cross-org-publishing.ps1 +++ b/eng/common/enable-cross-org-publishing.ps1 @@ -2,5 +2,12 @@ param( [string] $token ) -Write-Host "##vso[task.setvariable variable=VSS_NUGET_ACCESSTOKEN]$token" -Write-Host "##vso[task.setvariable variable=VSS_NUGET_URI_PREFIXES]https://dnceng.pkgs.visualstudio.com/;https://pkgs.dev.azure.com/dnceng/;https://devdiv.pkgs.visualstudio.com/;https://pkgs.dev.azure.com/devdiv/" + +. 
$PSScriptRoot\pipeline-logging-functions.ps1 + +# Write-PipelineSetVariable will no-op if a variable named $ci is not defined +# Since this script is only ever called in AzDO builds, just universally set it +$ci = $true + +Write-PipelineSetVariable -Name 'VSS_NUGET_ACCESSTOKEN' -Value $token -IsMultiJobVariable $false +Write-PipelineSetVariable -Name 'VSS_NUGET_URI_PREFIXES' -Value 'https://dnceng.pkgs.visualstudio.com/;https://pkgs.dev.azure.com/dnceng/;https://devdiv.pkgs.visualstudio.com/;https://pkgs.dev.azure.com/devdiv/' -IsMultiJobVariable $false diff --git a/eng/common/generate-graph-files.ps1 b/eng/common/generate-graph-files.ps1 deleted file mode 100644 index b056e4c1a..000000000 --- a/eng/common/generate-graph-files.ps1 +++ /dev/null @@ -1,87 +0,0 @@ -Param( - [Parameter(Mandatory=$true)][string] $barToken, # Token generated at https://maestro-prod.westus2.cloudapp.azure.com/Account/Tokens - [Parameter(Mandatory=$true)][string] $gitHubPat, # GitHub personal access token from https://github.com/settings/tokens (no auth scopes needed) - [Parameter(Mandatory=$true)][string] $azdoPat, # Azure Dev Ops tokens from https://dev.azure.com/dnceng/_details/security/tokens (code read scope needed) - [Parameter(Mandatory=$true)][string] $outputFolder, # Where the graphviz.txt file will be created - [string] $darcVersion = '1.1.0-beta.19175.6', # darc's version - [string] $graphvizVersion = '2.38', # GraphViz version - [switch] $includeToolset # Whether the graph should include toolset dependencies or not. i.e. arcade, optimization. For more about - # toolset dependencies see https://github.com/dotnet/arcade/blob/master/Documentation/Darc.md#toolset-vs-product-dependencies -) - -$ErrorActionPreference = "Stop" -. $PSScriptRoot\tools.ps1 - -Import-Module -Name (Join-Path $PSScriptRoot "native\CommonLibrary.psm1") - -function CheckExitCode ([string]$stage) -{ - $exitCode = $LASTEXITCODE - if ($exitCode -ne 0) { - Write-Host "Something failed in stage: '$stage'. Check for errors above. Exiting now..." - ExitWithExitCode $exitCode - } -} - -try { - Push-Location $PSScriptRoot - - Write-Host "Installing darc..." - . .\darc-init.ps1 -darcVersion $darcVersion - CheckExitCode "Running darc-init" - - $engCommonBaseDir = Join-Path $PSScriptRoot "native\" - $graphvizInstallDir = CommonLibrary\Get-NativeInstallDirectory - $nativeToolBaseUri = "https://netcorenativeassets.blob.core.windows.net/resource-packages/external" - $installBin = Join-Path $graphvizInstallDir "bin" - - Write-Host "Installing dot..." - .\native\install-tool.ps1 -ToolName graphviz -InstallPath $installBin -BaseUri $nativeToolBaseUri -CommonLibraryDirectory $engCommonBaseDir -Version $graphvizVersion -Verbose - - $darcExe = "$env:USERPROFILE\.dotnet\tools" - $darcExe = Resolve-Path "$darcExe\darc.exe" - - Create-Directory $outputFolder - - # Generate 3 graph descriptions: - # 1. Flat with coherency information - # 2. Graphviz (dot) file - # 3. Standard dependency graph - $graphVizFilePath = "$outputFolder\graphviz.txt" - $graphVizImageFilePath = "$outputFolder\graph.png" - $normalGraphFilePath = "$outputFolder\graph-full.txt" - $flatGraphFilePath = "$outputFolder\graph-flat.txt" - $baseOptions = @( "--github-pat", "$gitHubPat", "--azdev-pat", "$azdoPat", "--password", "$barToken" ) - - if ($includeToolset) { - Write-Host "Toolsets will be included in the graph..." - $baseOptions += @( "--include-toolset" ) - } - - Write-Host "Generating standard dependency graph..." 
- & "$darcExe" get-dependency-graph @baseOptions --output-file $normalGraphFilePath - CheckExitCode "Generating normal dependency graph" - - Write-Host "Generating flat dependency graph and graphviz file..." - & "$darcExe" get-dependency-graph @baseOptions --flat --coherency --graphviz $graphVizFilePath --output-file $flatGraphFilePath - CheckExitCode "Generating flat and graphviz dependency graph" - - Write-Host "Generating graph image $graphVizFilePath" - $dotFilePath = Join-Path $installBin "graphviz\$graphvizVersion\release\bin\dot.exe" - & "$dotFilePath" -Tpng -o"$graphVizImageFilePath" "$graphVizFilePath" - CheckExitCode "Generating graphviz image" - - Write-Host "'$graphVizFilePath', '$flatGraphFilePath', '$normalGraphFilePath' and '$graphVizImageFilePath' created!" -} -catch { - if (!$includeToolset) { - Write-Host "This might be a toolset repo which includes only toolset dependencies. " -NoNewline -ForegroundColor Yellow - Write-Host "Since -includeToolset is not set there is no graph to create. Include -includeToolset and try again..." -ForegroundColor Yellow - } - Write-Host $_ - Write-Host $_.Exception - Write-Host $_.ScriptStackTrace - ExitWithExitCode 1 -} finally { - Pop-Location -} \ No newline at end of file diff --git a/eng/common/generate-locproject.ps1 b/eng/common/generate-locproject.ps1 new file mode 100644 index 000000000..524aaa57f --- /dev/null +++ b/eng/common/generate-locproject.ps1 @@ -0,0 +1,189 @@ +Param( + [Parameter(Mandatory=$true)][string] $SourcesDirectory, # Directory where source files live; if using a Localize directory it should live in here + [string] $LanguageSet = 'VS_Main_Languages', # Language set to be used in the LocProject.json + [switch] $UseCheckedInLocProjectJson, # When set, generates a LocProject.json and compares it to one that already exists in the repo; otherwise just generates one + [switch] $CreateNeutralXlfs # Creates neutral xlf files. Only set to false when running locally +) + +# Generates LocProject.json files for the OneLocBuild task. OneLocBuildTask is described here: +# https://ceapex.visualstudio.com/CEINTL/_wiki/wikis/CEINTL.wiki/107/Localization-with-OneLocBuild-Task + +Set-StrictMode -Version 2.0 +$ErrorActionPreference = "Stop" +. 
$PSScriptRoot\pipeline-logging-functions.ps1 + +$exclusionsFilePath = "$SourcesDirectory\eng\Localize\LocExclusions.json" +$exclusions = @{ Exclusions = @() } +if (Test-Path -Path $exclusionsFilePath) +{ + $exclusions = Get-Content "$exclusionsFilePath" | ConvertFrom-Json +} + +Push-Location "$SourcesDirectory" # push location for Resolve-Path -Relative to work + +# Template files +$jsonFiles = @() +$jsonTemplateFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "\.template\.config\\localize\\.+\.en\.json" } # .NET templating pattern +$jsonTemplateFiles | ForEach-Object { + $null = $_.Name -Match "(.+)\.[\w-]+\.json" # matches '[filename].[langcode].json + + $destinationFile = "$($_.Directory.FullName)\$($Matches.1).json" + $jsonFiles += Copy-Item "$($_.FullName)" -Destination $destinationFile -PassThru +} + +$jsonWinformsTemplateFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "en\\strings\.json" } # current winforms pattern + +$wxlFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "\\.+\.wxl" -And -Not( $_.Directory.Name -Match "\d{4}" ) } # localized files live in four digit lang ID directories; this excludes them +if (-not $wxlFiles) { + $wxlEnFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "\\1033\\.+\.wxl" } # pick up en files (1033 = en) specifically so we can copy them to use as the neutral xlf files + if ($wxlEnFiles) { + $wxlFiles = @() + $wxlEnFiles | ForEach-Object { + $destinationFile = "$($_.Directory.Parent.FullName)\$($_.Name)" + $wxlFiles += Copy-Item "$($_.FullName)" -Destination $destinationFile -PassThru + } + } +} + +$macosHtmlEnFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "en\.lproj\\.+\.html$" } # add installer HTML files +$macosHtmlFiles = @() +if ($macosHtmlEnFiles) { + $macosHtmlEnFiles | ForEach-Object { + $destinationFile = "$($_.Directory.Parent.FullName)\$($_.Name)" + $macosHtmlFiles += Copy-Item "$($_.FullName)" -Destination $destinationFile -PassThru + } +} + +$xlfFiles = @() + +$allXlfFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory\*\*.xlf" +$langXlfFiles = @() +if ($allXlfFiles) { + $null = $allXlfFiles[0].FullName -Match "\.([\w-]+)\.xlf" # matches '[langcode].xlf' + $firstLangCode = $Matches.1 + $langXlfFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory\*\*.$firstLangCode.xlf" +} +$langXlfFiles | ForEach-Object { + $null = $_.Name -Match "(.+)\.[\w-]+\.xlf" # matches '[filename].[langcode].xlf + + $destinationFile = "$($_.Directory.FullName)\$($Matches.1).xlf" + $xlfFiles += Copy-Item "$($_.FullName)" -Destination $destinationFile -PassThru +} + +$locFiles = $jsonFiles + $jsonWinformsTemplateFiles + $xlfFiles + +$locJson = @{ + Projects = @( + @{ + LanguageSet = $LanguageSet + LocItems = @( + $locFiles | ForEach-Object { + $outputPath = "$(($_.DirectoryName | Resolve-Path -Relative) + "\")" + $continue = $true + foreach ($exclusion in $exclusions.Exclusions) { + if ($_.FullName.Contains($exclusion)) + { + $continue = $false + } + } + $sourceFile = ($_.FullName | Resolve-Path -Relative) + if (!$CreateNeutralXlfs -and $_.Extension -eq '.xlf') { + Remove-Item -Path $sourceFile + } + if ($continue) + { + if ($_.Directory.Name -eq 'en' -and $_.Extension -eq '.json') { + return @{ + SourceFile = $sourceFile + CopyOption = "LangIDOnPath" + OutputPath = "$($_.Directory.Parent.FullName | Resolve-Path -Relative)\" + } + } else { + 
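# NOTE: inferred from the two patterns in this block rather than from
# OneLocBuild documentation: "LangIDOnPath" appears to place translated copies
# in per-language folders (matching the en\strings.json layout handled above),
# while "LangIDOnName" appears to put the language code in the file name
# (Strings.fr.xlf next to Strings.xlf). A LocItem emitted here serializes
# roughly as:
#
#   { "SourceFile": ".\src\Strings.xlf", "CopyOption": "LangIDOnName", "OutputPath": ".\src\" }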
return @{ + SourceFile = $sourceFile + CopyOption = "LangIDOnName" + OutputPath = $outputPath + } + } + } + } + ) + }, + @{ + LanguageSet = $LanguageSet + CloneLanguageSet = "WiX_CloneLanguages" + LssFiles = @( "wxl_loc.lss" ) + LocItems = @( + $wxlFiles | ForEach-Object { + $outputPath = "$($_.Directory.FullName | Resolve-Path -Relative)\" + $continue = $true + foreach ($exclusion in $exclusions.Exclusions) { + if ($_.FullName.Contains($exclusion)) { + $continue = $false + } + } + $sourceFile = ($_.FullName | Resolve-Path -Relative) + if ($continue) + { + return @{ + SourceFile = $sourceFile + CopyOption = "LangIDOnPath" + OutputPath = $outputPath + } + } + } + ) + }, + @{ + LanguageSet = $LanguageSet + CloneLanguageSet = "VS_macOS_CloneLanguages" + LssFiles = @( ".\eng\common\loc\P22DotNetHtmlLocalization.lss" ) + LocItems = @( + $macosHtmlFiles | ForEach-Object { + $outputPath = "$($_.Directory.FullName | Resolve-Path -Relative)\" + $continue = $true + foreach ($exclusion in $exclusions.Exclusions) { + if ($_.FullName.Contains($exclusion)) { + $continue = $false + } + } + $sourceFile = ($_.FullName | Resolve-Path -Relative) + $lciFile = $sourceFile + ".lci" + if ($continue) { + $result = @{ + SourceFile = $sourceFile + CopyOption = "LangIDOnPath" + OutputPath = $outputPath + } + if (Test-Path $lciFile -PathType Leaf) { + $result["LciFile"] = $lciFile + } + return $result + } + } + ) + } + ) +} + +$json = ConvertTo-Json $locJson -Depth 5 +Write-Host "LocProject.json generated:`n`n$json`n`n" +Pop-Location + +if (!$UseCheckedInLocProjectJson) { + New-Item "$SourcesDirectory\eng\Localize\LocProject.json" -Force # Need this to make sure the Localize directory is created + Set-Content "$SourcesDirectory\eng\Localize\LocProject.json" $json +} +else { + New-Item "$SourcesDirectory\eng\Localize\LocProject-generated.json" -Force # Need this to make sure the Localize directory is created + Set-Content "$SourcesDirectory\eng\Localize\LocProject-generated.json" $json + + if ((Get-FileHash "$SourcesDirectory\eng\Localize\LocProject-generated.json").Hash -ne (Get-FileHash "$SourcesDirectory\eng\Localize\LocProject.json").Hash) { + Write-PipelineTelemetryError -Category "OneLocBuild" -Message "Existing LocProject.json differs from generated LocProject.json. Download LocProject-generated.json and compare them." + + exit 1 + } + else { + Write-Host "Generated LocProject.json and current LocProject.json are identical." + } +} diff --git a/eng/common/generate-sbom-prep.ps1 b/eng/common/generate-sbom-prep.ps1 new file mode 100644 index 000000000..3e5c1c74a --- /dev/null +++ b/eng/common/generate-sbom-prep.ps1 @@ -0,0 +1,21 @@ +Param( + [Parameter(Mandatory=$true)][string] $ManifestDirPath # Manifest directory where sbom will be placed +) + +. $PSScriptRoot\pipeline-logging-functions.ps1 + +Write-Host "Creating dir $ManifestDirPath" +# create directory for sbom manifest to be placed +if (!(Test-Path -path $ManifestDirPath)) +{ + New-Item -ItemType Directory -path $ManifestDirPath + Write-Host "Successfully created directory $ManifestDirPath" +} +else{ + Write-PipelineTelemetryError -category 'Build' "Unable to create sbom folder." 
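# NOTE: this branch runs when $ManifestDirPath already exists, not when
# New-Item actually failed, so the message is misleading on reruns; a tolerant
# equivalent of the whole if/else would be the single call below:
#
#   New-Item -ItemType Directory -Path $ManifestDirPath -Force | Out-Null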
+} + +Write-Host "Updating artifact name" +$artifact_name = "${env:SYSTEM_STAGENAME}_${env:AGENT_JOBNAME}_SBOM" -replace '["/:<>\\|?@*"() ]', '_' +Write-Host "Artifact name $artifact_name" +Write-Host "##vso[task.setvariable variable=ARTIFACT_NAME]$artifact_name" diff --git a/eng/common/generate-sbom-prep.sh b/eng/common/generate-sbom-prep.sh new file mode 100755 index 000000000..d5c76dc82 --- /dev/null +++ b/eng/common/generate-sbom-prep.sh @@ -0,0 +1,34 @@ +#!/usr/bin/env bash + +source="${BASH_SOURCE[0]}" + +# resolve $SOURCE until the file is no longer a symlink +while [[ -h $source ]]; do + scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" + source="$(readlink "$source")" + + # if $source was a relative symlink, we need to resolve it relative to the path where the + # symlink file was located + [[ $source != /* ]] && source="$scriptroot/$source" +done +scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" +. $scriptroot/pipeline-logging-functions.sh + +manifest_dir=$1 + +if [ ! -d "$manifest_dir" ] ; then + mkdir -p "$manifest_dir" + echo "Sbom directory created." $manifest_dir +else + Write-PipelineTelemetryError -category 'Build' "Unable to create sbom folder." +fi + +artifact_name=$SYSTEM_STAGENAME"_"$AGENT_JOBNAME"_SBOM" +echo "Artifact name before : "$artifact_name +# replace all special characters with _, some builds use special characters like : in Agent.Jobname, that is not a permissible name while uploading artifacts. +safe_artifact_name="${artifact_name//["/:<>\\|?@*$" ]/_}" +echo "Artifact name after : "$safe_artifact_name +export ARTIFACT_NAME=$safe_artifact_name +echo "##vso[task.setvariable variable=ARTIFACT_NAME]$safe_artifact_name" + +exit 0 diff --git a/eng/common/init-tools-native.ps1 b/eng/common/init-tools-native.ps1 index 8cf18bcfe..27ccdb9ec 100644 --- a/eng/common/init-tools-native.ps1 +++ b/eng/common/init-tools-native.ps1 @@ -31,40 +31,46 @@ Wait time between retry attempts in seconds .PARAMETER GlobalJsonFile File path to global.json file +.PARAMETER PathPromotion +Optional switch to enable either promote native tools specified in the global.json to the path (in Azure Pipelines) +or break the build if a native tool is not found on the path (on a local dev machine) + .NOTES #> [CmdletBinding(PositionalBinding=$false)] Param ( - [string] $BaseUri = "https://netcorenativeassets.blob.core.windows.net/resource-packages/external", + [string] $BaseUri = 'https://netcorenativeassets.blob.core.windows.net/resource-packages/external', [string] $InstallDirectory, [switch] $Clean = $False, [switch] $Force = $False, [int] $DownloadRetries = 5, [int] $RetryWaitTimeInSeconds = 30, - [string] $GlobalJsonFile + [string] $GlobalJsonFile, + [switch] $PathPromotion ) if (!$GlobalJsonFile) { - $GlobalJsonFile = Join-Path (Get-Item $PSScriptRoot).Parent.Parent.FullName "global.json" + $GlobalJsonFile = Join-Path (Get-Item $PSScriptRoot).Parent.Parent.FullName 'global.json' } Set-StrictMode -version 2.0 -$ErrorActionPreference="Stop" +$ErrorActionPreference='Stop' -Import-Module -Name (Join-Path $PSScriptRoot "native\CommonLibrary.psm1") +. 
$PSScriptRoot\pipeline-logging-functions.ps1 +Import-Module -Name (Join-Path $PSScriptRoot 'native\CommonLibrary.psm1') try { # Define verbose switch if undefined - $Verbose = $VerbosePreference -Eq "Continue" + $Verbose = $VerbosePreference -Eq 'Continue' - $EngCommonBaseDir = Join-Path $PSScriptRoot "native\" + $EngCommonBaseDir = Join-Path $PSScriptRoot 'native\' $NativeBaseDir = $InstallDirectory if (!$NativeBaseDir) { $NativeBaseDir = CommonLibrary\Get-NativeInstallDirectory } $Env:CommonLibrary_NativeInstallDir = $NativeBaseDir - $InstallBin = Join-Path $NativeBaseDir "bin" - $InstallerPath = Join-Path $EngCommonBaseDir "install-tool.ps1" + $InstallBin = Join-Path $NativeBaseDir 'bin' + $InstallerPath = Join-Path $EngCommonBaseDir 'install-tool.ps1' # Process tools list Write-Host "Processing $GlobalJsonFile" @@ -74,56 +80,105 @@ try { } $NativeTools = Get-Content($GlobalJsonFile) -Raw | ConvertFrom-Json | - Select-Object -Expand "native-tools" -ErrorAction SilentlyContinue + Select-Object -Expand 'native-tools' -ErrorAction SilentlyContinue if ($NativeTools) { - $NativeTools.PSObject.Properties | ForEach-Object { - $ToolName = $_.Name - $ToolVersion = $_.Value - $LocalInstallerArguments = @{ ToolName = "$ToolName" } - $LocalInstallerArguments += @{ InstallPath = "$InstallBin" } - $LocalInstallerArguments += @{ BaseUri = "$BaseUri" } - $LocalInstallerArguments += @{ CommonLibraryDirectory = "$EngCommonBaseDir" } - $LocalInstallerArguments += @{ Version = "$ToolVersion" } - - if ($Verbose) { - $LocalInstallerArguments += @{ Verbose = $True } - } - if (Get-Variable 'Force' -ErrorAction 'SilentlyContinue') { - if($Force) { - $LocalInstallerArguments += @{ Force = $True } - } - } - if ($Clean) { - $LocalInstallerArguments += @{ Clean = $True } - } - - Write-Verbose "Installing $ToolName version $ToolVersion" - Write-Verbose "Executing '$InstallerPath $($LocalInstallerArguments.Keys.ForEach({"-$_ '$($LocalInstallerArguments.$_)'"}) -join ' ')'" - & $InstallerPath @LocalInstallerArguments - if ($LASTEXITCODE -Ne "0") { - $errMsg = "$ToolName installation failed" - if ((Get-Variable 'DoNotAbortNativeToolsInstallationOnFailure' -ErrorAction 'SilentlyContinue') -and $DoNotAbortNativeToolsInstallationOnFailure) { - $showNativeToolsWarning = $true - if ((Get-Variable 'DoNotDisplayNativeToolsInstallationWarnings' -ErrorAction 'SilentlyContinue') -and $DoNotDisplayNativeToolsInstallationWarnings) { - $showNativeToolsWarning = $false + if ($PathPromotion -eq $True) { + $ArcadeToolsDirectory = "$env:SYSTEMDRIVE\arcade-tools" + if (Test-Path $ArcadeToolsDirectory) { # if this directory exists, we should use native tools on machine + $NativeTools.PSObject.Properties | ForEach-Object { + $ToolName = $_.Name + $ToolVersion = $_.Value + $InstalledTools = @{} + + if ((Get-Command "$ToolName" -ErrorAction SilentlyContinue) -eq $null) { + if ($ToolVersion -eq "latest") { + $ToolVersion = "" + } + $ToolDirectories = (Get-ChildItem -Path "$ArcadeToolsDirectory" -Filter "$ToolName-$ToolVersion*" | Sort-Object -Descending) + if ($ToolDirectories -eq $null) { + Write-Error "Unable to find directory for $ToolName $ToolVersion; please make sure the tool is installed on this image." 
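# NOTE: the PathPromotion convention assumed by this loop is that the image
# pre-installs each tool under %SYSTEMDRIVE%\arcade-tools\<tool>-<version>\
# together with a binpath.txt whose single line names the directory to prepend
# to PATH, e.g. (illustrative path only):
#
#   C:\arcade-tools\cmake-3.21.0\cmake-3.21.0-windows-x86_64\bin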
+ exit 1 } - if ($showNativeToolsWarning) { - Write-Warning $errMsg + $ToolDirectory = $ToolDirectories[0] + $BinPathFile = "$($ToolDirectory.FullName)\binpath.txt" + if (-not (Test-Path -Path "$BinPathFile")) { + Write-Error "Unable to find binpath.txt in '$($ToolDirectory.FullName)' ($ToolName $ToolVersion); artifact is either installed incorrectly or is not a bootstrappable tool." + exit 1 } - $toolInstallationFailure = $true - } else { - Write-Error $errMsg - exit 1 + $BinPath = Get-Content "$BinPathFile" + $ToolPath = Convert-Path -Path $BinPath + Write-Host "Adding $ToolName to the path ($ToolPath)..." + Write-Host "##vso[task.prependpath]$ToolPath" + $env:PATH = "$ToolPath;$env:PATH" + $InstalledTools += @{ $ToolName = $ToolDirectory.FullName } + } } + return $InstalledTools + } else { + $NativeTools.PSObject.Properties | ForEach-Object { + $ToolName = $_.Name + $ToolVersion = $_.Value + + if ((Get-Command "$ToolName" -ErrorAction SilentlyContinue) -eq $null) { + Write-PipelineTelemetryError -Category 'NativeToolsBootstrap' -Message "$ToolName not found on path. Please install $ToolName $ToolVersion before proceeding." + Write-PipelineTelemetryError -Category 'NativeToolsBootstrap' -Message "If this is running on a build machine, the arcade-tools directory was not found, which means there's an error with the image." + } + } + exit 0 + } + } else { + $NativeTools.PSObject.Properties | ForEach-Object { + $ToolName = $_.Name + $ToolVersion = $_.Value + $LocalInstallerArguments = @{ ToolName = "$ToolName" } + $LocalInstallerArguments += @{ InstallPath = "$InstallBin" } + $LocalInstallerArguments += @{ BaseUri = "$BaseUri" } + $LocalInstallerArguments += @{ CommonLibraryDirectory = "$EngCommonBaseDir" } + $LocalInstallerArguments += @{ Version = "$ToolVersion" } + + if ($Verbose) { + $LocalInstallerArguments += @{ Verbose = $True } + } + if (Get-Variable 'Force' -ErrorAction 'SilentlyContinue') { + if($Force) { + $LocalInstallerArguments += @{ Force = $True } + } + } + if ($Clean) { + $LocalInstallerArguments += @{ Clean = $True } + } + + Write-Verbose "Installing $ToolName version $ToolVersion" + Write-Verbose "Executing '$InstallerPath $($LocalInstallerArguments.Keys.ForEach({"-$_ '$($LocalInstallerArguments.$_)'"}) -join ' ')'" + & $InstallerPath @LocalInstallerArguments + if ($LASTEXITCODE -Ne "0") { + $errMsg = "$ToolName installation failed" + if ((Get-Variable 'DoNotAbortNativeToolsInstallationOnFailure' -ErrorAction 'SilentlyContinue') -and $DoNotAbortNativeToolsInstallationOnFailure) { + $showNativeToolsWarning = $true + if ((Get-Variable 'DoNotDisplayNativeToolsInstallationWarnings' -ErrorAction 'SilentlyContinue') -and $DoNotDisplayNativeToolsInstallationWarnings) { + $showNativeToolsWarning = $false + } + if ($showNativeToolsWarning) { + Write-Warning $errMsg + } + $toolInstallationFailure = $true + } else { + # We cannot change this to Write-PipelineTelemetryError because of https://github.com/dotnet/arcade/issues/4482 + Write-Host $errMsg + exit 1 + } + } + } + + if ((Get-Variable 'toolInstallationFailure' -ErrorAction 'SilentlyContinue') -and $toolInstallationFailure) { + # We cannot change this to Write-PipelineTelemetryError because of https://github.com/dotnet/arcade/issues/4482 + Write-Host 'Native tools bootstrap failed' + exit 1 } - } - - if ((Get-Variable 'toolInstallationFailure' -ErrorAction 'SilentlyContinue') -and $toolInstallationFailure) { - exit 1 } } else { - Write-Host "No native tools defined in global.json" + Write-Host 'No native tools defined in 
global.json'
     exit 0
   }
 
@@ -131,17 +186,18 @@ try {
   exit 0
 }
 if (Test-Path $InstallBin) {
-  Write-Host "Native tools are available from" (Convert-Path -Path $InstallBin)
+  Write-Host 'Native tools are available from ' (Convert-Path -Path $InstallBin)
   Write-Host "##vso[task.prependpath]$(Convert-Path -Path $InstallBin)"
+  return $InstallBin
 }
-else {
-  Write-Error "Native tools install directory does not exist, installation failed"
+elseif (-not ($PathPromotion)) {
+  Write-PipelineTelemetryError -Category 'NativeToolsBootstrap' -Message 'Native tools install directory does not exist, installation failed'
   exit 1
 }
 
 exit 0
}
catch {
-  Write-Host $_
-  Write-Host $_.Exception
-  exit 1
+  Write-Host $_.ScriptStackTrace
+  Write-PipelineTelemetryError -Category 'NativeToolsBootstrap' -Message $_
+  ExitWithExitCode 1
}
diff --git a/eng/common/init-tools-native.sh b/eng/common/init-tools-native.sh
index 4dafaaca1..3e6a8d6ac 100755
--- a/eng/common/init-tools-native.sh
+++ b/eng/common/init-tools-native.sh
@@ -10,12 +10,13 @@ force=false
 download_retries=5
 retry_wait_time_seconds=30
 global_json_file="$(dirname "$(dirname "${scriptroot}")")/global.json"
-declare -A native_assets
+declare -a native_assets
 
+. $scriptroot/pipeline-logging-functions.sh
 . $scriptroot/native/common-library.sh
 
 while (($# > 0)); do
-  lowerI="$(echo $1 | awk '{print tolower($0)}')"
+  lowerI="$(echo $1 | tr "[:upper:]" "[:lower:]")"
   case $lowerI in
     --baseuri)
       base_uri=$2
@@ -33,6 +34,14 @@ while (($# > 0)); do
       force=true
       shift 1
       ;;
+    --donotabortonfailure)
+      donotabortonfailure=true
+      shift 1
+      ;;
+    --donotdisplaywarnings)
+      donotdisplaywarnings=true
+      shift 1
+      ;;
     --downloadretries)
       download_retries=$2
       shift 2
@@ -51,6 +60,8 @@ while (($# > 0)); do
       echo "  - (default) %USERPROFILE%/.netcoreeng/native"
       echo ""
       echo "  --clean    Switch specifying not to install anything, but cleanup native asset folders"
+      echo "  --donotabortonfailure    Switch specifying whether to abort native tools installation on failure"
+      echo "  --donotdisplaywarnings    Switch specifying whether to display warnings during native tools installation on failure"
       echo "  --force    Clean and then install tools"
       echo "  --help    Print help and exit"
       echo ""
@@ -65,24 +76,89 @@ while (($# > 0)); do
 done
 
 function ReadGlobalJsonNativeTools {
-  # Get the native-tools section from the global.json.
-  local native_tools_section=$(cat $global_json_file | awk '/"native-tools"/,/}/')
-  # Only extract the contents of the object.
-  local native_tools_list=$(echo $native_tools_section | awk -F"[{}]" '{print $2}')
-  native_tools_list=${native_tools_list//[\" ]/}
-  native_tools_list=$( echo "$native_tools_list" | sed 's/\s//g' | sed 's/,/\n/g' )
-
-  local old_IFS=$IFS
-  while read -r line; do
-    # Lines are of the form: 'tool:version'
-    IFS=:
-    while read -r key value; do
-      native_assets[$key]=$value
-    done <<< "$line"
-  done <<< "$native_tools_list"
-  IFS=$old_IFS
-
-  return 0;
+  # happy path: we have a proper JSON parsing tool `jq(1)` in PATH!
+  if command -v jq &> /dev/null; then
+
+    # jq: read each key/value pair under "native-tools" entry and emit:
+    #   KEY="<key>" VALUE="<value>"
+    # followed by a null byte.
+    #
+    # bash: read line with null byte delimiter and push to array (for later `eval`uation).
+
+    while IFS= read -rd '' line; do
+      native_assets+=("$line")
+    done < <(jq -r '. 
| + select(has("native-tools")) | + ."native-tools" | + keys[] as $k | + @sh "KEY=\($k) VALUE=\(.[$k])\u0000"' "$global_json_file") + + return + fi + + # Warning: falling back to manually parsing JSON, which is not recommended. + + # Following routine matches the output and escaping logic of jq(1)'s @sh formatter used above. + # It has been tested with several weird strings with escaped characters in entries (key and value) + # and results were compared with the output of jq(1) in binary representation using xxd(1); + # just before the assignment to 'native_assets' array (above and below). + + # try to capture the section under "native-tools". + if [[ ! "$(cat "$global_json_file")" =~ \"native-tools\"[[:space:]\:\{]*([^\}]+) ]]; then + return + fi + + section="${BASH_REMATCH[1]}" + + parseStarted=0 + possibleEnd=0 + escaping=0 + escaped=0 + isKey=1 + + for (( i=0; i<${#section}; i++ )); do + char="${section:$i:1}" + if ! ((parseStarted)) && [[ "$char" =~ [[:space:],:] ]]; then continue; fi + + if ! ((escaping)) && [[ "$char" == "\\" ]]; then + escaping=1 + elif ((escaping)) && ! ((escaped)); then + escaped=1 + fi + + if ! ((parseStarted)) && [[ "$char" == "\"" ]]; then + parseStarted=1 + possibleEnd=0 + elif [[ "$char" == "'" ]]; then + token="$token'\\\''" + possibleEnd=0 + elif ((escaping)) || [[ "$char" != "\"" ]]; then + token="$token$char" + possibleEnd=1 + fi + + if ((possibleEnd)) && ! ((escaping)) && [[ "$char" == "\"" ]]; then + # Use printf to unescape token to match jq(1)'s @sh formatting rules. + # do not use 'token="$(printf "$token")"' syntax, as $() eats the trailing linefeed. + printf -v token "'$token'" + + if ((isKey)); then + KEY="$token" + isKey=0 + else + line="KEY=$KEY VALUE=$token" + native_assets+=("$line") + isKey=1 + fi + + # reset for next token + parseStarted=0 + token= + elif ((escaping)) && ((escaped)); then + escaping=0 + escaped=0 + fi + done } native_base_dir=$install_directory @@ -91,6 +167,7 @@ if [[ -z $install_directory ]]; then fi install_bin="${native_base_dir}/bin" +installed_any=false ReadGlobalJsonNativeTools @@ -99,14 +176,14 @@ if [[ ${#native_assets[@]} -eq 0 ]]; then exit 0; else native_installer_dir="$scriptroot/native" - for tool in "${!native_assets[@]}" - do - tool_version=${native_assets[$tool]} - installer_name="install-$tool.sh" - installer_command="$native_installer_dir/$installer_name" + for index in "${!native_assets[@]}"; do + eval "${native_assets["$index"]}" + + installer_path="$native_installer_dir/install-$KEY.sh" + installer_command="$installer_path" installer_command+=" --baseuri $base_uri" installer_command+=" --installpath $install_bin" - installer_command+=" --version $tool_version" + installer_command+=" --version $VALUE" echo $installer_command if [[ $force = true ]]; then @@ -117,11 +194,29 @@ else installer_command+=" --clean" fi - $installer_command - - if [[ $? != 0 ]]; then - echo "Execution Failed" >&2 - exit 1 + if [[ -a $installer_path ]]; then + $installer_command + if [[ $? 
!= 0 ]]; then
+        if [[ $donotabortonfailure = true ]]; then
+          if [[ $donotdisplaywarnings != true ]]; then
+            Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Execution Failed"
+          fi
+        else
+          Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Execution Failed"
+          exit 1
+        fi
+      else
+        installed_any=true
+      fi
+    else
+      if [[ $donotabortonfailure == true ]]; then
+        if [[ $donotdisplaywarnings != true ]]; then
+          Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Execution Failed: no install script"
+        fi
+      else
+        Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Execution Failed: no install script"
+        exit 1
+      fi
+    fi
   done
 fi
 
@@ -134,8 +229,10 @@ if [[ -d $install_bin ]]; then
   echo "Native tools are available from $install_bin"
   echo "##vso[task.prependpath]$install_bin"
 else
-  echo "Native tools install directory does not exist, installation failed" >&2
-  exit 1
+  if [[ $installed_any = true ]]; then
+    Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Native tools install directory does not exist, installation failed"
+    exit 1
+  fi
 fi
 
 exit 0
diff --git a/eng/common/internal-feed-operations.ps1 b/eng/common/internal-feed-operations.ps1
index 8b8bafd6a..92b77347d 100644
--- a/eng/common/internal-feed-operations.ps1
+++ b/eng/common/internal-feed-operations.ps1
@@ -6,9 +6,8 @@ param(
   [switch] $IsFeedPrivate
 )
 
-$ErrorActionPreference = "Stop"
+$ErrorActionPreference = 'Stop'
 Set-StrictMode -Version 2.0
-
 . $PSScriptRoot\tools.ps1
 
 # Sets VSS_NUGET_EXTERNAL_FEED_ENDPOINTS based on the "darc-int-*" feeds defined in NuGet.config. This is needed
@@ -21,7 +20,7 @@ function SetupCredProvider {
   )
 
   # Install the Cred Provider NuGet plugin
-  Write-Host "Setting up Cred Provider NuGet plugin in the agent..."
+  Write-Host 'Setting up Cred Provider NuGet plugin in the agent...'
   Write-Host "Getting 'installcredprovider.ps1' from 'https://github.com/microsoft/artifacts-credprovider'..."
 
   $url = 'https://raw.githubusercontent.com/microsoft/artifacts-credprovider/master/helpers/installcredprovider.ps1'
@@ -29,28 +28,28 @@ function SetupCredProvider {
   Write-Host "Writing the contents of 'installcredprovider.ps1' locally..."
   Invoke-WebRequest $url -OutFile installcredprovider.ps1
 
-  Write-Host "Installing plugin..."
+  Write-Host 'Installing plugin...'
   .\installcredprovider.ps1 -Force
 
   Write-Host "Deleting local copy of 'installcredprovider.ps1'..."
   Remove-Item .\installcredprovider.ps1
 
   if (-Not("$env:USERPROFILE\.nuget\plugins\netcore")) {
-    Write-Host "CredProvider plugin was not installed correctly!"
+    Write-PipelineTelemetryError -Category 'Arcade' -Message 'CredProvider plugin was not installed correctly!'
     ExitWithExitCode 1
   }
   else {
-    Write-Host "CredProvider plugin was installed correctly!"
+    Write-Host 'CredProvider plugin was installed correctly!'
   }
 
   # Then, we set the 'VSS_NUGET_EXTERNAL_FEED_ENDPOINTS' environment variable to restore from the stable
   # feeds successfully
 
-  $nugetConfigPath = "$RepoRoot\NuGet.config"
+  $nugetConfigPath = Join-Path $RepoRoot "NuGet.config"
 
   if (-Not (Test-Path -Path $nugetConfigPath)) {
-    Write-Host "NuGet.config file not found in repo's root!"
-    ExitWithExitCode 1
+    Write-PipelineTelemetryError -Category 'Build' -Message 'NuGet.config file not found in repo root!'
+    ExitWithExitCode 1
   }
 
   $endpoints = New-Object System.Collections.ArrayList
@@ -64,7 +63,6 @@ function SetupCredProvider {
   }
 
   if (($endpoints | Measure-Object).Count -gt 0) {
-    # Create the JSON object. 
It should look like '{"endpointCredentials": [{"endpoint":"http://example.index.json", "username":"optional", "password":"accesstoken"}]}' $endpointCredentials = @{endpointCredentials=$endpoints} | ConvertTo-Json -Compress # Create the environment variables the AzDo way @@ -81,13 +79,13 @@ function SetupCredProvider { } else { - Write-Host "No internal endpoints found in NuGet.config" + Write-Host 'No internal endpoints found in NuGet.config' } } #Workaround for https://github.com/microsoft/msbuild/issues/4430 function InstallDotNetSdkAndRestoreArcade { - $dotnetTempDir = "$RepoRoot\dotnet" + $dotnetTempDir = Join-Path $RepoRoot "dotnet" $dotnetSdkVersion="2.1.507" # After experimentation we know this version works when restoring the SDK (compared to 3.0.*) $dotnet = "$dotnetTempDir\dotnet.exe" $restoreProjPath = "$PSScriptRoot\restore.proj" @@ -99,7 +97,7 @@ function InstallDotNetSdkAndRestoreArcade { & $dotnet restore $restoreProjPath - Write-Host "Arcade SDK restored!" + Write-Host 'Arcade SDK restored!' if (Test-Path -Path $restoreProjPath) { Remove-Item $restoreProjPath @@ -113,23 +111,22 @@ function InstallDotNetSdkAndRestoreArcade { try { Push-Location $PSScriptRoot - if ($Operation -like "setup") { + if ($Operation -like 'setup') { SetupCredProvider $AuthToken } - elseif ($Operation -like "install-restore") { + elseif ($Operation -like 'install-restore') { InstallDotNetSdkAndRestoreArcade } else { - Write-Host "Unknown operation '$Operation'!" + Write-PipelineTelemetryError -Category 'Arcade' -Message "Unknown operation '$Operation'!" ExitWithExitCode 1 } } catch { - Write-Host $_ - Write-Host $_.Exception Write-Host $_.ScriptStackTrace + Write-PipelineTelemetryError -Category 'Arcade' -Message $_ ExitWithExitCode 1 } finally { - Pop-Location + Pop-Location } diff --git a/eng/common/internal-feed-operations.sh b/eng/common/internal-feed-operations.sh index 1ff654d2f..9378223ba 100755 --- a/eng/common/internal-feed-operations.sh +++ b/eng/common/internal-feed-operations.sh @@ -30,7 +30,7 @@ function SetupCredProvider { rm installcredprovider.sh if [ ! -d "$HOME/.nuget/plugins" ]; then - echo "CredProvider plugin was not installed correctly!" + Write-PipelineTelemetryError -category 'Build' 'CredProvider plugin was not installed correctly!' ExitWithExitCode 1 else echo "CredProvider plugin was installed correctly!" @@ -39,10 +39,10 @@ function SetupCredProvider { # Then, we set the 'VSS_NUGET_EXTERNAL_FEED_ENDPOINTS' environment variable to restore from the stable # feeds successfully - local nugetConfigPath="$repo_root/NuGet.config" + local nugetConfigPath="{$repo_root}NuGet.config" if [ ! "$nugetConfigPath" ]; then - echo "NuGet.config file not found in repo's root!" + Write-PipelineTelemetryError -category 'Build' "NuGet.config file not found in repo's root!" ExitWithExitCode 1 fi @@ -62,7 +62,6 @@ function SetupCredProvider { endpoints+=']' if [ ${#endpoints} -gt 2 ]; then - # Create the JSON object. 
It should look like '{"endpointCredentials": [{"endpoint":"http://example.index.json", "username":"optional", "password":"accesstoken"}]}' local endpointCredentials="{\"endpointCredentials\": "$endpoints"}" echo "##vso[task.setvariable variable=VSS_NUGET_EXTERNAL_FEED_ENDPOINTS]$endpointCredentials" @@ -102,7 +101,7 @@ authToken='' repoName='' while [[ $# > 0 ]]; do - opt="$(echo "$1" | awk '{print tolower($0)}')" + opt="$(echo "$1" | tr "[:upper:]" "[:lower:]")" case "$opt" in --operation) operation=$2 diff --git a/eng/common/internal/Directory.Build.props b/eng/common/internal/Directory.Build.props index e33179ef3..dbf99d82a 100644 --- a/eng/common/internal/Directory.Build.props +++ b/eng/common/internal/Directory.Build.props @@ -1,4 +1,4 @@ - + diff --git a/eng/common/internal/NuGet.config b/eng/common/internal/NuGet.config new file mode 100644 index 000000000..19d3d311b --- /dev/null +++ b/eng/common/internal/NuGet.config @@ -0,0 +1,7 @@ + + + + + + + diff --git a/eng/common/internal/Tools.csproj b/eng/common/internal/Tools.csproj index 1a39a7ef3..7f5ce6d60 100644 --- a/eng/common/internal/Tools.csproj +++ b/eng/common/internal/Tools.csproj @@ -1,13 +1,16 @@ - - + net472 false + false + + + diff --git a/eng/common/loc/P22DotNetHtmlLocalization.lss b/eng/common/loc/P22DotNetHtmlLocalization.lss new file mode 100644 index 000000000..5d892d619 --- /dev/null +++ b/eng/common/loc/P22DotNetHtmlLocalization.lss @@ -0,0 +1,29 @@ + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/eng/common/msbuild.ps1 b/eng/common/msbuild.ps1 index b37fd3d5e..f041e5ddd 100644 --- a/eng/common/msbuild.ps1 +++ b/eng/common/msbuild.ps1 @@ -1,10 +1,12 @@ [CmdletBinding(PositionalBinding=$false)] Param( - [string] $verbosity = "minimal", + [string] $verbosity = 'minimal', [bool] $warnAsError = $true, [bool] $nodeReuse = $true, [switch] $ci, [switch] $prepareMachine, + [switch] $excludePrereleaseVS, + [string] $msbuildEngine = $null, [Parameter(ValueFromRemainingArguments=$true)][String[]]$extraArgs ) @@ -18,9 +20,8 @@ try { MSBuild @extraArgs } catch { - Write-Host $_ - Write-Host $_.Exception Write-Host $_.ScriptStackTrace + Write-PipelineTelemetryError -Category 'Build' -Message $_ ExitWithExitCode 1 } diff --git a/eng/common/msbuild.sh b/eng/common/msbuild.sh index 8160cd5a5..20d3dad54 100755 --- a/eng/common/msbuild.sh +++ b/eng/common/msbuild.sh @@ -19,7 +19,7 @@ prepare_machine=false extra_args='' while (($# > 0)); do - lowerI="$(echo $1 | awk '{print tolower($0)}')" + lowerI="$(echo $1 | tr "[:upper:]" "[:lower:]")" case $lowerI in --verbosity) verbosity=$2 diff --git a/eng/common/native/CommonLibrary.psm1 b/eng/common/native/CommonLibrary.psm1 index 41416862d..ca38268c4 100644 --- a/eng/common/native/CommonLibrary.psm1 +++ b/eng/common/native/CommonLibrary.psm1 @@ -48,7 +48,7 @@ function DownloadAndExtract { -Verbose:$Verbose if ($DownloadStatus -Eq $False) { - Write-Error "Download failed" + Write-Error "Download failed from $Uri" return $False } @@ -145,9 +145,12 @@ function Get-File { New-Item -path $DownloadDirectory -force -itemType "Directory" | Out-Null } + $TempPath = "$Path.tmp" if (Test-Path -IsValid -Path $Uri) { - Write-Verbose "'$Uri' is a file path, copying file to '$Path'" - Copy-Item -Path $Uri -Destination $Path + Write-Verbose "'$Uri' is a file path, copying temporarily to '$TempPath'" + Copy-Item -Path $Uri -Destination $TempPath + Write-Verbose "Moving temporary file to '$Path'" + Move-Item -Path $TempPath -Destination $Path return $? 
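# NOTE: copying or downloading into "$Path.tmp" and only then Move-Item-ing to
# "$Path" (here and in Expand-Zip below) means the destination never exists in
# a half-written state, so a failed transfer cannot be mistaken for a cached
# download by a later retry. The pattern in isolation, with illustrative
# names:
#
#   $tmp = "$target.tmp"
#   Invoke-WebRequest -UseBasicParsing -Uri $uri -OutFile $tmp
#   Move-Item -Path $tmp -Destination $target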
} else { @@ -157,8 +160,10 @@ function Get-File { while($Attempt -Lt $DownloadRetries) { try { - Invoke-WebRequest -UseBasicParsing -Uri $Uri -OutFile $Path - Write-Verbose "Downloaded to '$Path'" + Invoke-WebRequest -UseBasicParsing -Uri $Uri -OutFile $TempPath + Write-Verbose "Downloaded to temporary location '$TempPath'" + Move-Item -Path $TempPath -Destination $Path + Write-Verbose "Moved temporary file to '$Path'" return $True } catch { @@ -271,7 +276,8 @@ function Get-MachineArchitecture { } if (($ProcessorArchitecture -Eq "AMD64") -Or ($ProcessorArchitecture -Eq "IA64") -Or - ($ProcessorArchitecture -Eq "ARM64")) { + ($ProcessorArchitecture -Eq "ARM64") -Or + ($ProcessorArchitecture -Eq "LOONGARCH64")) { return "x64" } return "x86" @@ -359,16 +365,21 @@ function Expand-Zip { return $False } } - if (-Not (Test-Path $OutputDirectory)) { - New-Item -path $OutputDirectory -Force -itemType "Directory" | Out-Null + + $TempOutputDirectory = Join-Path "$(Split-Path -Parent $OutputDirectory)" "$(Split-Path -Leaf $OutputDirectory).tmp" + if (Test-Path $TempOutputDirectory) { + Remove-Item $TempOutputDirectory -Force -Recurse } + New-Item -Path $TempOutputDirectory -Force -ItemType "Directory" | Out-Null Add-Type -assembly "system.io.compression.filesystem" - [io.compression.zipfile]::ExtractToDirectory("$ZipPath", "$OutputDirectory") + [io.compression.zipfile]::ExtractToDirectory("$ZipPath", "$TempOutputDirectory") if ($? -Eq $False) { Write-Error "Unable to extract '$ZipPath'" return $False } + + Move-Item -Path $TempOutputDirectory -Destination $OutputDirectory } catch { Write-Host $_ diff --git a/eng/common/native/common-library.sh b/eng/common/native/common-library.sh index 271bddfac..080c2c283 100755 --- a/eng/common/native/common-library.sh +++ b/eng/common/native/common-library.sh @@ -34,7 +34,7 @@ function ExpandZip { echo "'Force flag enabled, but '$output_directory' exists. Removing directory" rm -rf $output_directory if [[ $? != 0 ]]; then - echo Unable to remove '$output_directory'>&2 + Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Unable to remove '$output_directory'" return 1 fi fi @@ -45,7 +45,7 @@ function ExpandZip { echo "Extracting archive" tar -xf $zip_path -C $output_directory if [[ $? != 0 ]]; then - echo "Unable to extract '$zip_path'" >&2 + Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Unable to extract '$zip_path'" return 1 fi @@ -117,7 +117,7 @@ function DownloadAndExtract { # Download file GetFile "$uri" "$temp_tool_path" $force $download_retries $retry_wait_time_seconds if [[ $? != 0 ]]; then - echo "Failed to download '$uri' to '$temp_tool_path'." >&2 + Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Failed to download '$uri' to '$temp_tool_path'." return 1 fi @@ -125,7 +125,7 @@ function DownloadAndExtract { echo "extracting from $temp_tool_path to $installDir" ExpandZip "$temp_tool_path" "$installDir" $force $download_retries $retry_wait_time_seconds if [[ $? != 0 ]]; then - echo "Failed to extract '$temp_tool_path' to '$installDir'." >&2 + Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Failed to extract '$temp_tool_path' to '$installDir'." return 1 fi @@ -148,8 +148,12 @@ function NewScriptShim { fi if [[ ! -f $tool_file_path ]]; then - echo "Specified tool file path:'$tool_file_path' does not exist" >&2 - return 1 + # try to see if the path is lower cased + tool_file_path="$(echo $tool_file_path | tr "[:upper:]" "[:lower:]")" + if [[ ! 
-f $tool_file_path ]]; then + Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Specified tool file path:'$tool_file_path' does not exist" + return 1 + fi fi local shim_contents=$'#!/usr/bin/env bash\n' diff --git a/eng/common/native/init-compiler.sh b/eng/common/native/init-compiler.sh new file mode 100755 index 000000000..2d5660642 --- /dev/null +++ b/eng/common/native/init-compiler.sh @@ -0,0 +1,137 @@ +#!/bin/sh +# +# This file detects the C/C++ compiler and exports it to the CC/CXX environment variables +# +# NOTE: some scripts source this file and rely on stdout being empty, make sure to not output anything here! + +if [ -z "$build_arch" ] || [ -z "$compiler" ]; then + echo "Usage..." + echo "build_arch= compiler= init-compiler.sh" + echo "Specify the target architecture." + echo "Specify the name of compiler (clang or gcc)." + exit 1 +fi + +case "$compiler" in + clang*|-clang*|--clang*) + # clangx.y or clang-x.y + version="$(echo "$compiler" | tr -d '[:alpha:]-=')" + majorVersion="${version%%.*}" + [ -z "${version##*.*}" ] && minorVersion="${version#*.}" + + if [ -z "$minorVersion" ] && [ -n "$majorVersion" ] && [ "$majorVersion" -le 6 ]; then + minorVersion=0; + fi + compiler=clang + ;; + + gcc*|-gcc*|--gcc*) + # gccx.y or gcc-x.y + version="$(echo "$compiler" | tr -d '[:alpha:]-=')" + majorVersion="${version%%.*}" + [ -z "${version##*.*}" ] && minorVersion="${version#*.}" + compiler=gcc + ;; +esac + +cxxCompiler="$compiler++" + +# clear the existing CC and CXX from environment +CC= +CXX= +LDFLAGS= + +if [ "$compiler" = "gcc" ]; then cxxCompiler="g++"; fi + +check_version_exists() { + desired_version=-1 + + # Set up the environment to be used for building with the desired compiler. + if command -v "$compiler-$1.$2" > /dev/null; then + desired_version="-$1.$2" + elif command -v "$compiler$1$2" > /dev/null; then + desired_version="$1$2" + elif command -v "$compiler-$1$2" > /dev/null; then + desired_version="-$1$2" + fi + + echo "$desired_version" +} + +if [ -z "$CLR_CC" ]; then + + # Set default versions + if [ -z "$majorVersion" ]; then + # note: gcc (all versions) and clang versions higher than 6 do not have minor version in file name, if it is zero. + if [ "$compiler" = "clang" ]; then versions="18 17 16 15 14 13 12 11 10 9 8 7 6.0 5.0 4.0 3.9 3.8 3.7 3.6 3.5" + elif [ "$compiler" = "gcc" ]; then versions="13 12 11 10 9 8 7 6 5 4.9"; fi + + for version in $versions; do + _major="${version%%.*}" + [ -z "${version##*.*}" ] && _minor="${version#*.}" + desired_version="$(check_version_exists "$_major" "$_minor")" + if [ "$desired_version" != "-1" ]; then majorVersion="$_major"; break; fi + done + + if [ -z "$majorVersion" ]; then + if command -v "$compiler" > /dev/null; then + if [ "$(uname)" != "Darwin" ]; then + echo "Warning: Specific version of $compiler not found, falling back to use the one in PATH." + fi + CC="$(command -v "$compiler")" + CXX="$(command -v "$cxxCompiler")" + else + echo "No usable version of $compiler found." + exit 1 + fi + else + if [ "$compiler" = "clang" ] && [ "$majorVersion" -lt 5 ]; then + if [ "$build_arch" = "arm" ] || [ "$build_arch" = "armel" ]; then + if command -v "$compiler" > /dev/null; then + echo "Warning: Found clang version $majorVersion which is not supported on arm/armel architectures, falling back to use clang from PATH." + CC="$(command -v "$compiler")" + CXX="$(command -v "$cxxCompiler")" + else + echo "Found clang version $majorVersion which is not supported on arm/armel architectures, and there is no clang in PATH." 
+ exit 1 + fi + fi + fi + fi + else + desired_version="$(check_version_exists "$majorVersion" "$minorVersion")" + if [ "$desired_version" = "-1" ]; then + echo "Could not find specific version of $compiler: $majorVersion $minorVersion." + exit 1 + fi + fi + + if [ -z "$CC" ]; then + CC="$(command -v "$compiler$desired_version")" + CXX="$(command -v "$cxxCompiler$desired_version")" + if [ -z "$CXX" ]; then CXX="$(command -v "$cxxCompiler")"; fi + fi +else + if [ ! -f "$CLR_CC" ]; then + echo "CLR_CC is set but path '$CLR_CC' does not exist" + exit 1 + fi + CC="$CLR_CC" + CXX="$CLR_CXX" +fi + +if [ -z "$CC" ]; then + echo "Unable to find $compiler." + exit 1 +fi + +# Only lld version >= 9 can be considered stable. lld doesn't support s390x. +if [ "$compiler" = "clang" ] && [ -n "$majorVersion" ] && [ "$majorVersion" -ge 9 ] && [ "$build_arch" != "s390x" ]; then + if "$CC" -fuse-ld=lld -Wl,--version >/dev/null 2>&1; then + LDFLAGS="-fuse-ld=lld" + fi +fi + +SCAN_BUILD_COMMAND="$(command -v "scan-build$desired_version")" + +export CC CXX LDFLAGS SCAN_BUILD_COMMAND diff --git a/eng/common/native/init-distro-rid.sh b/eng/common/native/init-distro-rid.sh new file mode 100755 index 000000000..de1687b2c --- /dev/null +++ b/eng/common/native/init-distro-rid.sh @@ -0,0 +1,130 @@ +#!/usr/bin/env bash + +# getNonPortableDistroRid +# +# Input: +# targetOs: (str) +# targetArch: (str) +# rootfsDir: (str) +# +# Return: +# non-portable rid +getNonPortableDistroRid() +{ + local targetOs="$1" + local targetArch="$2" + local rootfsDir="$3" + local nonPortableRid="" + + if [ "$targetOs" = "linux" ]; then + if [ -e "${rootfsDir}/etc/os-release" ]; then + source "${rootfsDir}/etc/os-release" + + if [[ "${ID}" == "rhel" || "${ID}" == "rocky" || "${ID}" == "alpine" ]]; then + # remove the last version digit + VERSION_ID="${VERSION_ID%.*}" + fi + + if [[ "${VERSION_ID:-}" =~ ^([[:digit:]]|\.)+$ ]]; then + nonPortableRid="${ID}.${VERSION_ID}-${targetArch}" + else + # Rolling release distros either do not set VERSION_ID, set it as blank or + # set it to non-version looking string (such as TEMPLATE_VERSION_ID on ArchLinux); + # so omit it here to be consistent with everything else. + nonPortableRid="${ID}-${targetArch}" + fi + + elif [ -e "${rootfsDir}/android_platform" ]; then + source "$rootfsDir"/android_platform + nonPortableRid="$RID" + fi + fi + + if [ "$targetOs" = "freebsd" ]; then + # $rootfsDir can be empty. freebsd-version is shell script and it should always work. 
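For orientation, the /etc/os-release branch of getNonPortableDistroRid above can be run in isolation; a bash sketch that assumes a host (non-cross) x64 build and that VERSION_ID is set:

#!/usr/bin/env bash
# Sketch: derive a non-portable RID such as "ubuntu.22.04-x64" from
# /etc/os-release, mirroring the logic above. targetArch and a host
# (non-cross) build are assumptions for illustration.
targetArch=x64
if [ -e /etc/os-release ]; then
  . /etc/os-release
  if [ "$ID" = "rhel" ] || [ "$ID" = "rocky" ] || [ "$ID" = "alpine" ]; then
    VERSION_ID="${VERSION_ID%.*}"   # remove the last version part, as above
  fi
  echo "${ID}.${VERSION_ID}-${targetArch}" | tr '[:upper:]' '[:lower:]'
fi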
+ __freebsd_major_version=$($rootfsDir/bin/freebsd-version | { read v; echo "${v%%.*}"; }) + nonPortableRid="freebsd.$__freebsd_major_version-${targetArch}" + elif command -v getprop && getprop ro.product.system.model 2>&1 | grep -qi android; then + __android_sdk_version=$(getprop ro.build.version.sdk) + nonPortableRid="android.$__android_sdk_version-${targetArch}" + elif [ "$targetOs" = "illumos" ]; then + __uname_version=$(uname -v) + case "$__uname_version" in + omnios-*) + __omnios_major_version=$(echo "${__uname_version:8:2}") + nonPortableRid=omnios."$__omnios_major_version"-"$targetArch" + ;; + joyent_*) + __smartos_major_version=$(echo "${__uname_version:7:4}") + nonPortableRid=smartos."$__smartos_major_version"-"$targetArch" + ;; + illumos_*) + nonPortableRid=openindiana-"$targetArch" + ;; + esac + elif [ "$targetOs" = "solaris" ]; then + __uname_version=$(uname -v) + __solaris_major_version=$(echo "${__uname_version%.*}") + nonPortableRid=solaris."$__solaris_major_version"-"$targetArch" + elif [ "$targetOs" = "haiku" ]; then + __uname_release=$(uname -r) + nonPortableRid=haiku.r"$__uname_release"-"$targetArch" + fi + + echo "$(echo $nonPortableRid | tr '[:upper:]' '[:lower:]')" +} + +# initDistroRidGlobal +# +# Input: +# os: (str) +# arch: (str) +# rootfsDir?: (nullable:string) +# +# Return: +# None +# +# Notes: +# +# It is important to note that the function does not return anything, but it +# exports the following variables on success: +# +# __DistroRid : Non-portable rid of the target platform. +# __PortableTargetOS : OS-part of the portable rid that corresponds to the target platform. +# +initDistroRidGlobal() +{ + local targetOs="$1" + local targetArch="$2" + local rootfsDir="" + if [ "$#" -ge 3 ]; then + rootfsDir="$3" + fi + + if [ -n "${rootfsDir}" ]; then + # We may have a cross build. Check for the existence of the rootfsDir + if [ ! -e "${rootfsDir}" ]; then + echo "Error rootfsDir has been passed, but the location is not valid." + exit 1 + fi + fi + + __DistroRid=$(getNonPortableDistroRid "${targetOs}" "${targetArch}" "${rootfsDir}") + + if [ -z "${__PortableTargetOS:-}" ]; then + __PortableTargetOS="$targetOs" + + STRINGS="$(command -v strings || true)" + if [ -z "$STRINGS" ]; then + STRINGS="$(command -v llvm-strings || true)" + fi + + # Check for musl-based distros (e.g Alpine Linux, Void Linux). + if "${rootfsDir}/usr/bin/ldd" --version 2>&1 | grep -q musl || + ( [ -n "$STRINGS" ] && "$STRINGS" "${rootfsDir}/usr/bin/ldd" 2>&1 | grep -q musl ); then + __PortableTargetOS="linux-musl" + fi + fi + + export __DistroRid __PortableTargetOS +} diff --git a/eng/common/native/init-os-and-arch.sh b/eng/common/native/init-os-and-arch.sh new file mode 100755 index 000000000..e693617a6 --- /dev/null +++ b/eng/common/native/init-os-and-arch.sh @@ -0,0 +1,80 @@ +#!/usr/bin/env bash + +# Use uname to determine what the OS is. +OSName=$(uname -s | tr '[:upper:]' '[:lower:]') + +if command -v getprop && getprop ro.product.system.model 2>&1 | grep -qi android; then + OSName="android" +fi + +case "$OSName" in +freebsd|linux|netbsd|openbsd|sunos|android|haiku) + os="$OSName" ;; +darwin) + os=osx ;; +*) + echo "Unsupported OS $OSName detected!" + exit 1 ;; +esac + +# On Solaris, `uname -m` is discouraged, see https://docs.oracle.com/cd/E36784_01/html/E36870/uname-1.html +# and `uname -p` returns processor type (e.g. i386 on amd64). +# The appropriate tool to determine CPU is isainfo(1) https://docs.oracle.com/cd/E36784_01/html/E36870/isainfo-1.html. 
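The musl probe in initDistroRidGlobal above is also usable on its own; a bash sketch assuming a non-cross build, so the rootfs defaults to /:

#!/usr/bin/env bash
# Sketch: detect a musl-based libc (e.g. Alpine, Void) the way
# initDistroRidGlobal does, by asking the rootfs ldd for its version.
# An empty rootfsDir (host build) is assumed here.
rootfsDir="${ROOTFS_DIR:-}"
if "${rootfsDir}/usr/bin/ldd" --version 2>&1 | grep -q musl; then
  echo 'linux-musl'
else
  echo 'linux'
fi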
+if [ "$os" = "sunos" ]; then + if uname -o 2>&1 | grep -q illumos; then + os="illumos" + else + os="solaris" + fi + CPUName=$(isainfo -n) +else + # For the rest of the operating systems, use uname(1) to determine what the CPU is. + CPUName=$(uname -m) +fi + +case "$CPUName" in + arm64|aarch64) + arch=arm64 + ;; + + loongarch64) + arch=loongarch64 + ;; + + riscv64) + arch=riscv64 + ;; + + amd64|x86_64) + arch=x64 + ;; + + armv7l|armv8l) + if (NAME=""; . /etc/os-release; test "$NAME" = "Tizen"); then + arch=armel + else + arch=arm + fi + ;; + + armv6l) + arch=armv6 + ;; + + i[3-6]86) + echo "Unsupported CPU $CPUName detected, build might not succeed!" + arch=x86 + ;; + + s390x) + arch=s390x + ;; + + ppc64le) + arch=ppc64le + ;; + *) + echo "Unknown CPU $CPUName detected!" + exit 1 + ;; +esac diff --git a/eng/common/native/install-cmake-test.sh b/eng/common/native/install-cmake-test.sh index 53ddf4e68..8a5e7cf0d 100755 --- a/eng/common/native/install-cmake-test.sh +++ b/eng/common/native/install-cmake-test.sh @@ -14,7 +14,7 @@ download_retries=5 retry_wait_time_seconds=30 while (($# > 0)); do - lowerI="$(echo $1 | awk '{print tolower($0)}')" + lowerI="$(echo $1 | tr "[:upper:]" "[:lower:]")" case $lowerI in --baseuri) base_uri=$2 @@ -63,7 +63,7 @@ done tool_name="cmake-test" tool_os=$(GetCurrentOS) -tool_folder=$(echo $tool_os | awk '{print tolower($0)}') +tool_folder="$(echo $tool_os | tr "[:upper:]" "[:lower:]")" tool_arch="x86_64" tool_name_moniker="$tool_name-$version-$tool_os-$tool_arch" tool_install_directory="$install_path/$tool_name/$version" @@ -101,7 +101,7 @@ fi DownloadAndExtract $uri $tool_install_directory $force $download_retries $retry_wait_time_seconds if [[ $? != 0 ]]; then - echo "Installation failed" >&2 + Write-PipelineTelemetryError -category 'NativeToolsBootstrap' 'Installation failed' exit 1 fi @@ -110,8 +110,8 @@ fi NewScriptShim $shim_path $tool_file_path true if [[ $? != 0 ]]; then - echo "Shim generation failed" >&2 + Write-PipelineTelemetryError -category 'NativeToolsBootstrap' 'Shim generation failed' exit 1 fi -exit 0 \ No newline at end of file +exit 0 diff --git a/eng/common/native/install-cmake.sh b/eng/common/native/install-cmake.sh index 5f1a182fa..de496beeb 100755 --- a/eng/common/native/install-cmake.sh +++ b/eng/common/native/install-cmake.sh @@ -14,7 +14,7 @@ download_retries=5 retry_wait_time_seconds=30 while (($# > 0)); do - lowerI="$(echo $1 | awk '{print tolower($0)}')" + lowerI="$(echo $1 | tr "[:upper:]" "[:lower:]")" case $lowerI in --baseuri) base_uri=$2 @@ -63,7 +63,7 @@ done tool_name="cmake" tool_os=$(GetCurrentOS) -tool_folder=$(echo $tool_os | awk '{print tolower($0)}') +tool_folder="$(echo $tool_os | tr "[:upper:]" "[:lower:]")" tool_arch="x86_64" tool_name_moniker="$tool_name-$version-$tool_os-$tool_arch" tool_install_directory="$install_path/$tool_name/$version" @@ -101,7 +101,7 @@ fi DownloadAndExtract $uri $tool_install_directory $force $download_retries $retry_wait_time_seconds if [[ $? != 0 ]]; then - echo "Installation failed" >&2 + Write-PipelineTelemetryError -category 'NativeToolsBootstrap' 'Installation failed' exit 1 fi @@ -110,8 +110,8 @@ fi NewScriptShim $shim_path $tool_file_path true if [[ $? 
!= 0 ]]; then - echo "Shim generation failed" >&2 + Write-PipelineTelemetryError -category 'NativeToolsBootstrap' 'Shim generation failed' exit 1 fi -exit 0 \ No newline at end of file +exit 0 diff --git a/eng/common/native/install-tool.ps1 b/eng/common/native/install-tool.ps1 index 635ab3fd4..78f2d84a4 100644 --- a/eng/common/native/install-tool.ps1 +++ b/eng/common/native/install-tool.ps1 @@ -46,6 +46,8 @@ Param ( [int] $RetryWaitTimeInSeconds = 30 ) +. $PSScriptRoot\..\pipeline-logging-functions.ps1 + # Import common library modules Import-Module -Name (Join-Path $CommonLibraryDirectory "CommonLibrary.psm1") @@ -93,7 +95,7 @@ try { -Verbose:$Verbose if ($InstallStatus -Eq $False) { - Write-Error "Installation failed" + Write-PipelineTelemetryError "Installation failed" -Category "NativeToolsetBootstrapping" exit 1 } } @@ -103,7 +105,7 @@ try { Write-Error "There are multiple copies of $ToolName in $($ToolInstallDirectory): `n$(@($ToolFilePath | out-string))" exit 1 } elseif (@($ToolFilePath).Length -Lt 1) { - Write-Error "$ToolName was not found in $ToolFilePath." + Write-Host "$ToolName was not found in $ToolInstallDirectory." exit 1 } @@ -117,14 +119,14 @@ try { -Verbose:$Verbose if ($GenerateShimStatus -Eq $False) { - Write-Error "Generate shim failed" + Write-PipelineTelemetryError "Generate shim failed" -Category "NativeToolsetBootstrapping" return 1 } exit 0 } catch { - Write-Host $_ - Write-Host $_.Exception + Write-Host $_.ScriptStackTrace + Write-PipelineTelemetryError -Category "NativeToolsetBootstrapping" -Message $_ exit 1 } diff --git a/eng/common/performance/perfhelixpublish.proj b/eng/common/performance/perfhelixpublish.proj deleted file mode 100644 index e5826b532..000000000 --- a/eng/common/performance/perfhelixpublish.proj +++ /dev/null @@ -1,102 +0,0 @@ - - - - %HELIX_CORRELATION_PAYLOAD%\performance\scripts\benchmarks_ci.py --csproj %HELIX_CORRELATION_PAYLOAD%\performance\$(TargetCsproj) - --dotnet-versions %DOTNET_VERSION% --cli-source-info args --cli-branch %PERFLAB_BRANCH% --cli-commit-sha %PERFLAB_HASH% --cli-repository https://github.com/%PERFLAB_REPO% --cli-source-timestamp %PERFLAB_BUILDTIMESTAMP% - py -3 - %HELIX_CORRELATION_PAYLOAD%\Core_Root\CoreRun.exe - %HELIX_CORRELATION_PAYLOAD%\Baseline_Core_Root\CoreRun.exe - $(HelixPreCommands);call %HELIX_CORRELATION_PAYLOAD%\performance\tools\machine-setup.cmd - %HELIX_CORRELATION_PAYLOAD%\artifacts\BenchmarkDotNet.Artifacts - %HELIX_CORRELATION_PAYLOAD%\artifacts\BenchmarkDotNet.Artifacts_Baseline - %HELIX_CORRELATION_PAYLOAD%\performance\src\tools\ResultsComparer\ResultsComparer.csproj - %HELIX_CORRELATION_PAYLOAD%\performance\tools\dotnet\$(Architecture)\dotnet.exe - %25%25 - %HELIX_WORKITEM_ROOT%\testResults.xml - - - - $HELIX_CORRELATION_PAYLOAD - $(BaseDirectory)/performance - - - - $HELIX_WORKITEM_PAYLOAD - $(BaseDirectory) - - - - $(PerformanceDirectory)/scripts/benchmarks_ci.py --csproj $(PerformanceDirectory)/$(TargetCsproj) - --dotnet-versions $DOTNET_VERSION --cli-source-info args --cli-branch $PERFLAB_BRANCH --cli-commit-sha $PERFLAB_HASH --cli-repository https://github.com/$PERFLAB_REPO --cli-source-timestamp $PERFLAB_BUILDTIMESTAMP - python3 - $(BaseDirectory)/Core_Root/corerun - $(BaseDirectory)/Baseline_Core_Root/corerun - $(HelixPreCommands);chmod +x $(PerformanceDirectory)/tools/machine-setup.sh;. 
$(PerformanceDirectory)/tools/machine-setup.sh - $(BaseDirectory)/artifacts/BenchmarkDotNet.Artifacts - $(BaseDirectory)/artifacts/BenchmarkDotNet.Artifacts_Baseline - $(PerformanceDirectory)/src/tools/ResultsComparer/ResultsComparer.csproj - $(PerformanceDirectory)/tools/dotnet/$(Architecture)/dotnet - %25 - $HELIX_WORKITEM_ROOT/testResults.xml - - - - --corerun $(CoreRun) - - - - --corerun $(BaselineCoreRun) - - - - $(Python) $(WorkItemCommand) --incremental no --architecture $(Architecture) -f $(_Framework) $(PerfLabArguments) - - - - $(WorkItemCommand) $(CliArguments) - - - - - %(Identity) - - - - - 5 - - - - - - - - - - - false - - - - - - $(WorkItemDirectory) - $(WorkItemCommand) --bdn-artifacts $(BaselineArtifactsDirectory) --bdn-arguments="--anyCategories $(BDNCategories) $(ExtraBenchmarkDotNetArguments) $(BaselineCoreRunArgument) --partition-count $(PartitionCount) --partition-index %(HelixWorkItem.Index)" - $(WorkItemCommand) --bdn-artifacts $(ArtifactsDirectory) --bdn-arguments="--anyCategories $(BDNCategories) $(ExtraBenchmarkDotNetArguments) $(CoreRunArgument) --partition-count $(PartitionCount) --partition-index %(HelixWorkItem.Index)" - $(DotnetExe) run -f $(_Framework) -p $(ResultsComparer) --base $(BaselineArtifactsDirectory) --diff $(ArtifactsDirectory) --threshold 2$(Percent) --xml $(XMLResults);$(FinalCommand) - 4:00 - - - - - - $(WorkItemDirectory) - $(WorkItemCommand) --bdn-artifacts $(BaselineArtifactsDirectory) --bdn-arguments="--anyCategories $(BDNCategories) $(ExtraBenchmarkDotNetArguments) $(BaselineCoreRunArgument)" - $(WorkItemCommand) --bdn-artifacts $(ArtifactsDirectory) --bdn-arguments="--anyCategories $(BDNCategories) $(ExtraBenchmarkDotNetArguments) $(CoreRunArgument)" - $(DotnetExe) run -f $(_Framework) -p $(ResultsComparer) --base $(BaselineArtifactsDirectory) --diff $(ArtifactsDirectory) --threshold 2$(Percent) --xml $(XMLResults) - 4:00 - - - \ No newline at end of file diff --git a/eng/common/performance/performance-setup.ps1 b/eng/common/performance/performance-setup.ps1 deleted file mode 100644 index ec41965fc..000000000 --- a/eng/common/performance/performance-setup.ps1 +++ /dev/null @@ -1,106 +0,0 @@ -Param( - [string] $SourceDirectory=$env:BUILD_SOURCESDIRECTORY, - [string] $CoreRootDirectory, - [string] $BaselineCoreRootDirectory, - [string] $Architecture="x64", - [string] $Framework="netcoreapp5.0", - [string] $CompilationMode="Tiered", - [string] $Repository=$env:BUILD_REPOSITORY_NAME, - [string] $Branch=$env:BUILD_SOURCEBRANCH, - [string] $CommitSha=$env:BUILD_SOURCEVERSION, - [string] $BuildNumber=$env:BUILD_BUILDNUMBER, - [string] $RunCategories="coreclr corefx", - [string] $Csproj="src\benchmarks\micro\MicroBenchmarks.csproj", - [string] $Kind="micro", - [switch] $Internal, - [switch] $Compare, - [string] $Configurations="CompilationMode=$CompilationMode" -) - -$RunFromPerformanceRepo = ($Repository -eq "dotnet/performance") -or ($Repository -eq "dotnet-performance") -$UseCoreRun = ($CoreRootDirectory -ne [string]::Empty) -$UseBaselineCoreRun = ($BaselineCoreRootDirectory -ne [string]::Empty) - -$PayloadDirectory = (Join-Path $SourceDirectory "Payload") -$PerformanceDirectory = (Join-Path $PayloadDirectory "performance") -$WorkItemDirectory = (Join-Path $SourceDirectory "workitem") -$ExtraBenchmarkDotNetArguments = "--iterationCount 1 --warmupCount 0 --invocationCount 1 --unrollFactor 1 --strategy ColdStart --stopOnFirstError true" -$Creator = $env:BUILD_DEFINITIONNAME -$PerfLabArguments = "" -$HelixSourcePrefix = "pr" - -$Queue = 
"Windows.10.Amd64.ClientRS4.DevEx.15.8.Open" - -if ($Framework.StartsWith("netcoreapp")) { - $Queue = "Windows.10.Amd64.ClientRS5.Open" -} - -if ($Compare) { - $Queue = "Windows.10.Amd64.19H1.Tiger.Perf.Open" - $PerfLabArguments = "" - $ExtraBenchmarkDotNetArguments = "" -} - -if ($Internal) { - $Queue = "Windows.10.Amd64.19H1.Tiger.Perf" - $PerfLabArguments = "--upload-to-perflab-container" - $ExtraBenchmarkDotNetArguments = "" - $Creator = "" - $HelixSourcePrefix = "official" -} - -$CommonSetupArguments="--frameworks $Framework --queue $Queue --build-number $BuildNumber --build-configs $Configurations" -$SetupArguments = "--repository https://github.com/$Repository --branch $Branch --get-perf-hash --commit-sha $CommitSha $CommonSetupArguments" - -if ($RunFromPerformanceRepo) { - $SetupArguments = "--perf-hash $CommitSha $CommonSetupArguments" - - robocopy $SourceDirectory $PerformanceDirectory /E /XD $PayloadDirectory $SourceDirectory\artifacts $SourceDirectory\.git -} -else { - git clone --branch master --depth 1 --quiet https://github.com/dotnet/performance $PerformanceDirectory -} - -if ($UseCoreRun) { - $NewCoreRoot = (Join-Path $PayloadDirectory "Core_Root") - Move-Item -Path $CoreRootDirectory -Destination $NewCoreRoot -} -if ($UseBaselineCoreRun) { - $NewBaselineCoreRoot = (Join-Path $PayloadDirectory "Baseline_Core_Root") - Move-Item -Path $BaselineCoreRootDirectory -Destination $NewBaselineCoreRoot -} - -$DocsDir = (Join-Path $PerformanceDirectory "docs") -robocopy $DocsDir $WorkItemDirectory - -# Set variables that we will need to have in future steps -$ci = $true - -. "$PSScriptRoot\..\pipeline-logging-functions.ps1" - -# Directories -Write-PipelineSetVariable -Name 'PayloadDirectory' -Value "$PayloadDirectory" -IsMultiJobVariable $false -Write-PipelineSetVariable -Name 'PerformanceDirectory' -Value "$PerformanceDirectory" -IsMultiJobVariable $false -Write-PipelineSetVariable -Name 'WorkItemDirectory' -Value "$WorkItemDirectory" -IsMultiJobVariable $false - -# Script Arguments -Write-PipelineSetVariable -Name 'Python' -Value "py -3" -IsMultiJobVariable $false -Write-PipelineSetVariable -Name 'ExtraBenchmarkDotNetArguments' -Value "$ExtraBenchmarkDotNetArguments" -IsMultiJobVariable $false -Write-PipelineSetVariable -Name 'SetupArguments' -Value "$SetupArguments" -IsMultiJobVariable $false -Write-PipelineSetVariable -Name 'PerfLabArguments' -Value "$PerfLabArguments" -IsMultiJobVariable $false -Write-PipelineSetVariable -Name 'BDNCategories' -Value "$RunCategories" -IsMultiJobVariable $false -Write-PipelineSetVariable -Name 'TargetCsproj' -Value "$Csproj" -IsMultiJobVariable $false -Write-PipelineSetVariable -Name 'Kind' -Value "$Kind" -IsMultiJobVariable $false -Write-PipelineSetVariable -Name 'Architecture' -Value "$Architecture" -IsMultiJobVariable $false -Write-PipelineSetVariable -Name 'UseCoreRun' -Value "$UseCoreRun" -IsMultiJobVariable $false -Write-PipelineSetVariable -Name 'UseBaselineCoreRun' -Value "$UseBaselineCoreRun" -IsMultiJobVariable $false -Write-PipelineSetVariable -Name 'RunFromPerfRepo' -Value "$RunFromPerformanceRepo" -IsMultiJobVariable $false -Write-PipelineSetVariable -Name 'Compare' -Value "$Compare" -IsMultiJobVariable $false - -# Helix Arguments -Write-PipelineSetVariable -Name 'Creator' -Value "$Creator" -IsMultiJobVariable $false -Write-PipelineSetVariable -Name 'Queue' -Value "$Queue" -IsMultiJobVariable $false -Write-PipelineSetVariable -Name 'HelixSourcePrefix' -Value "$HelixSourcePrefix" -IsMultiJobVariable $false -Write-PipelineSetVariable 
-Name '_BuildConfig' -Value "$Architecture.$Kind.$Framework" -IsMultiJobVariable $false - -exit 0 \ No newline at end of file diff --git a/eng/common/performance/performance-setup.sh b/eng/common/performance/performance-setup.sh deleted file mode 100755 index 2f2092166..000000000 --- a/eng/common/performance/performance-setup.sh +++ /dev/null @@ -1,216 +0,0 @@ -#!/usr/bin/env bash - -source_directory=$BUILD_SOURCESDIRECTORY -core_root_directory= -baseline_core_root_directory= -architecture=x64 -framework=netcoreapp5.0 -compilation_mode=tiered -repository=$BUILD_REPOSITORY_NAME -branch=$BUILD_SOURCEBRANCH -commit_sha=$BUILD_SOURCEVERSION -build_number=$BUILD_BUILDNUMBER -internal=false -compare=false -kind="micro" -run_categories="coreclr corefx" -csproj="src\benchmarks\micro\MicroBenchmarks.csproj" -configurations= -run_from_perf_repo=false -use_core_run=true -use_baseline_core_run=true - -while (($# > 0)); do - lowerI="$(echo $1 | awk '{print tolower($0)}')" - case $lowerI in - --sourcedirectory) - source_directory=$2 - shift 2 - ;; - --corerootdirectory) - core_root_directory=$2 - shift 2 - ;; - --baselinecorerootdirectory) - baseline_core_root_directory=$2 - shift 2 - ;; - --architecture) - architecture=$2 - shift 2 - ;; - --framework) - framework=$2 - shift 2 - ;; - --compilationmode) - compilation_mode=$2 - shift 2 - ;; - --repository) - repository=$2 - shift 2 - ;; - --branch) - branch=$2 - shift 2 - ;; - --commitsha) - commit_sha=$2 - shift 2 - ;; - --buildnumber) - build_number=$2 - shift 2 - ;; - --kind) - kind=$2 - shift 2 - ;; - --runcategories) - run_categories=$2 - shift 2 - ;; - --csproj) - csproj=$2 - shift 2 - ;; - --internal) - internal=true - shift 1 - ;; - --compare) - compare=true - shift 1 - ;; - --configurations) - configurations=$2 - shift 2 - ;; - --help) - echo "Common settings:" - echo " --corerootdirectory Directory where Core_Root exists, if running perf testing with --corerun" - echo " --architecture Architecture of the testing being run" - echo " --configurations List of key=value pairs that will be passed to perf testing infrastructure." - echo " ex: --configurations \"CompilationMode=Tiered OptimzationLevel=PGO\"" - echo " --help Print help and exit" - echo "" - echo "Advanced settings:" - echo " --framework The framework to run, if not running in master" - echo " --compliationmode The compilation mode if not passing --configurations" - echo " --sourcedirectory The directory of the sources. Defaults to env:BUILD_SOURCESDIRECTORY" - echo " --repository The name of the repository in the / format. Defaults to env:BUILD_REPOSITORY_NAME" - echo " --branch The name of the branch. Defaults to env:BUILD_SOURCEBRANCH" - echo " --commitsha The commit sha1 to run against. Defaults to env:BUILD_SOURCEVERSION" - echo " --buildnumber The build number currently running. Defaults to env:BUILD_BUILDNUMBER" - echo " --csproj The relative path to the benchmark csproj whose tests should be run. Defaults to src\benchmarks\micro\MicroBenchmarks.csproj" - echo " --kind Related to csproj. The kind of benchmarks that should be run. Defaults to micro" - echo " --runcategories Related to csproj. Categories of benchmarks to run. Defaults to \"coreclr corefx\"" - echo " --internal If the benchmarks are running as an official job." 
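The argument loop being deleted here follows the same convention the surviving scripts keep: lower-case the flag (now via tr rather than awk), match it in a case, and shift past flag plus value. A standalone sketch with an illustrative flag:

#!/usr/bin/env bash
# Sketch of the shared option-parsing convention used by these scripts.
# The --verbosity flag is just an example.
verbosity=minimal
while (($# > 0)); do
  lowerI="$(echo "$1" | tr '[:upper:]' '[:lower:]')"
  case $lowerI in
    --verbosity)
      verbosity=$2
      shift 2
      ;;
    *)
      echo "Unknown option '$1'" >&2
      exit 1
      ;;
  esac
done
echo "verbosity=$verbosity"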
- echo "" - exit 0 - ;; - esac -done - -if [ "$repository" == "dotnet/performance" ] || [ "$repository" == "dotnet-performance" ]; then - run_from_perf_repo=true -fi - -if [ -z "$configurations" ]; then - configurations="CompliationMode=$compilation_mode" -fi - -if [ -z "$core_root_directory" ]; then - use_core_run=false -fi - -if [ -z "$baseline_core_root_directory" ]; then - use_baseline_core_run=false -fi - -payload_directory=$source_directory/Payload -performance_directory=$payload_directory/performance -workitem_directory=$source_directory/workitem -extra_benchmark_dotnet_arguments="--iterationCount 1 --warmupCount 0 --invocationCount 1 --unrollFactor 1 --strategy ColdStart --stopOnFirstError true" -perflab_arguments= -queue=Ubuntu.1804.Amd64.Open -creator=$BUILD_DEFINITIONNAME -helix_source_prefix="pr" - -if [[ "$compare" == true ]]; then - extra_benchmark_dotnet_arguments= - perflab_arguments= - - # No open queues for arm64 - if [[ "$architecture" = "arm64" ]]; then - echo "Compare not available for arm64" - exit 1 - fi - - queue=Ubuntu.1804.Amd64.Tiger.Perf.Open -fi - -if [[ "$internal" == true ]]; then - perflab_arguments="--upload-to-perflab-container" - helix_source_prefix="official" - creator= - extra_benchmark_dotnet_arguments= - - if [[ "$architecture" = "arm64" ]]; then - queue=Ubuntu.1804.Arm64.Perf - else - queue=Ubuntu.1804.Amd64.Tiger.Perf - fi -fi - -common_setup_arguments="--frameworks $framework --queue $queue --build-number $build_number --build-configs $configurations" -setup_arguments="--repository https://github.com/$repository --branch $branch --get-perf-hash --commit-sha $commit_sha $common_setup_arguments" - -if [[ "$run_from_perf_repo" = true ]]; then - payload_directory= - workitem_directory=$source_directory - performance_directory=$workitem_directory - setup_arguments="--perf-hash $commit_sha $common_setup_arguments" -else - git clone --branch master --depth 1 --quiet https://github.com/dotnet/performance $performance_directory - - docs_directory=$performance_directory/docs - mv $docs_directory $workitem_directory -fi - -if [[ "$use_core_run" = true ]]; then - new_core_root=$payload_directory/Core_Root - mv $core_root_directory $new_core_root -fi - -if [[ "$use_baseline_core_run" = true ]]; then - new_baseline_core_root=$payload_directory/Baseline_Core_Root - mv $baseline_core_root_directory $new_baseline_core_root -fi - -ci=true - -_script_dir=$(pwd)/eng/common -. 
"$_script_dir/pipeline-logging-functions.sh" - -# Make sure all of our variables are available for future steps -Write-PipelineSetVariable -name "UseCoreRun" -value "$use_core_run" -is_multi_job_variable false -Write-PipelineSetVariable -name "UseBaselineCoreRun" -value "$use_baseline_core_run" -is_multi_job_variable false -Write-PipelineSetVariable -name "Architecture" -value "$architecture" -is_multi_job_variable false -Write-PipelineSetVariable -name "PayloadDirectory" -value "$payload_directory" -is_multi_job_variable false -Write-PipelineSetVariable -name "PerformanceDirectory" -value "$performance_directory" -is_multi_job_variable false -Write-PipelineSetVariable -name "WorkItemDirectory" -value "$workitem_directory" -is_multi_job_variable false -Write-PipelineSetVariable -name "Queue" -value "$queue" -is_multi_job_variable false -Write-PipelineSetVariable -name "SetupArguments" -value "$setup_arguments" -is_multi_job_variable false -Write-PipelineSetVariable -name "Python" -value "$python3" -is_multi_job_variable false -Write-PipelineSetVariable -name "PerfLabArguments" -value "$perflab_arguments" -is_multi_job_variable false -Write-PipelineSetVariable -name "ExtraBenchmarkDotNetArguments" -value "$extra_benchmark_dotnet_arguments" -is_multi_job_variable false -Write-PipelineSetVariable -name "BDNCategories" -value "$run_categories" -is_multi_job_variable false -Write-PipelineSetVariable -name "TargetCsproj" -value "$csproj" -is_multi_job_variable false -Write-PipelineSetVariable -name "RunFromPerfRepo" -value "$run_from_perf_repo" -is_multi_job_variable false -Write-PipelineSetVariable -name "Creator" -value "$creator" -is_multi_job_variable false -Write-PipelineSetVariable -name "HelixSourcePrefix" -value "$helix_source_prefix" -is_multi_job_variable false -Write-PipelineSetVariable -name "Kind" -value "$kind" -is_multi_job_variable false -Write-PipelineSetVariable -name "_BuildConfig" -value "$architecture.$kind.$framework" -is_multi_job_variable false -Write-PipelineSetVariable -name "Compare" -value "$compare" -is_multi_job_variable false diff --git a/eng/common/pipeline-logging-functions.ps1 b/eng/common/pipeline-logging-functions.ps1 index af5f48aac..8e422c561 100644 --- a/eng/common/pipeline-logging-functions.ps1 +++ b/eng/common/pipeline-logging-functions.ps1 @@ -12,6 +12,7 @@ $script:loggingCommandEscapeMappings = @( # TODO: WHAT ABOUT "="? WHAT ABOUT "%" # TODO: BUG: Escape % ??? # TODO: Add test to verify don't need to escape "=". 
+# Specify "-Force" to force pipeline formatted output even if "$ci" is false or not set function Write-PipelineTelemetryError { [CmdletBinding()] param( @@ -25,80 +26,101 @@ function Write-PipelineTelemetryError { [string]$SourcePath, [string]$LineNumber, [string]$ColumnNumber, - [switch]$AsOutput) + [switch]$AsOutput, + [switch]$Force) - $PSBoundParameters.Remove("Category") | Out-Null + $PSBoundParameters.Remove('Category') | Out-Null + if ($Force -Or ((Test-Path variable:ci) -And $ci)) { $Message = "(NETCORE_ENGINEERING_TELEMETRY=$Category) $Message" - $PSBoundParameters.Remove("Message") | Out-Null - $PSBoundParameters.Add("Message", $Message) - - Write-PipelineTaskError @PSBoundParameters + } + $PSBoundParameters.Remove('Message') | Out-Null + $PSBoundParameters.Add('Message', $Message) + Write-PipelineTaskError @PSBoundParameters } +# Specify "-Force" to force pipeline formatted output even if "$ci" is false or not set function Write-PipelineTaskError { [CmdletBinding()] param( - [Parameter(Mandatory = $true)] - [string]$Message, - [Parameter(Mandatory = $false)] - [string]$Type = 'error', - [string]$ErrCode, - [string]$SourcePath, - [string]$LineNumber, - [string]$ColumnNumber, - [switch]$AsOutput) - - if(!$ci) { - if($Type -eq 'error') { - Write-Host $Message -ForegroundColor Red - return + [Parameter(Mandatory = $true)] + [string]$Message, + [Parameter(Mandatory = $false)] + [string]$Type = 'error', + [string]$ErrCode, + [string]$SourcePath, + [string]$LineNumber, + [string]$ColumnNumber, + [switch]$AsOutput, + [switch]$Force + ) + + if (!$Force -And (-Not (Test-Path variable:ci) -Or !$ci)) { + if ($Type -eq 'error') { + Write-Host $Message -ForegroundColor Red + return } elseif ($Type -eq 'warning') { - Write-Host $Message -ForegroundColor Yellow - return + Write-Host $Message -ForegroundColor Yellow + return } - } - - if(($Type -ne 'error') -and ($Type -ne 'warning')) { + } + + if (($Type -ne 'error') -and ($Type -ne 'warning')) { Write-Host $Message return - } - if(-not $PSBoundParameters.ContainsKey('Type')) { + } + $PSBoundParameters.Remove('Force') | Out-Null + if (-not $PSBoundParameters.ContainsKey('Type')) { $PSBoundParameters.Add('Type', 'error') - } - Write-LogIssue @PSBoundParameters - } + } + Write-LogIssue @PSBoundParameters +} - function Write-PipelineSetVariable { +function Write-PipelineSetVariable { [CmdletBinding()] param( - [Parameter(Mandatory = $true)] - [string]$Name, - [string]$Value, - [switch]$Secret, - [switch]$AsOutput, - [bool]$IsMultiJobVariable=$true) - - if($ci) { + [Parameter(Mandatory = $true)] + [string]$Name, + [string]$Value, + [switch]$Secret, + [switch]$AsOutput, + [bool]$IsMultiJobVariable = $true) + + if ((Test-Path variable:ci) -And $ci) { Write-LoggingCommand -Area 'task' -Event 'setvariable' -Data $Value -Properties @{ - 'variable' = $Name - 'isSecret' = $Secret - 'isOutput' = $IsMultiJobVariable + 'variable' = $Name + 'isSecret' = $Secret + 'isOutput' = $IsMultiJobVariable } -AsOutput:$AsOutput - } - } + } +} - function Write-PipelinePrependPath { +function Write-PipelinePrependPath { [CmdletBinding()] param( - [Parameter(Mandatory=$true)] - [string]$Path, - [switch]$AsOutput) - if($ci) { + [Parameter(Mandatory = $true)] + [string]$Path, + [switch]$AsOutput) + + if ((Test-Path variable:ci) -And $ci) { Write-LoggingCommand -Area 'task' -Event 'prependpath' -Data $Path -AsOutput:$AsOutput - } - } + } +} + +function Write-PipelineSetResult { + [CmdletBinding()] + param( + [ValidateSet("Succeeded", "SucceededWithIssues", "Failed", 
"Cancelled", "Skipped")] + [Parameter(Mandatory = $true)] + [string]$Result, + [string]$Message) + if ((Test-Path variable:ci) -And $ci) { + Write-LoggingCommand -Area 'task' -Event 'complete' -Data $Message -Properties @{ + 'result' = $Result + } + } +} <######################################## # Private functions. @@ -115,7 +137,8 @@ function Format-LoggingCommandData { foreach ($mapping in $script:loggingCommandEscapeMappings) { $Value = $Value.Replace($mapping.Token, $mapping.Replacement) } - } else { + } + else { for ($i = $script:loggingCommandEscapeMappings.Length - 1 ; $i -ge 0 ; $i--) { $mapping = $script:loggingCommandEscapeMappings[$i] $Value = $Value.Replace($mapping.Replacement, $mapping.Token) @@ -148,7 +171,8 @@ function Format-LoggingCommand { if ($first) { $null = $sb.Append(' ') $first = $false - } else { + } + else { $null = $sb.Append(';') } @@ -185,7 +209,8 @@ function Write-LoggingCommand { $command = Format-LoggingCommand -Area $Area -Event $Event -Data $Data -Properties $Properties if ($AsOutput) { $command - } else { + } + else { Write-Host $command } } @@ -204,12 +229,12 @@ function Write-LogIssue { [switch]$AsOutput) $command = Format-LoggingCommand -Area 'task' -Event 'logissue' -Data $Message -Properties @{ - 'type' = $Type - 'code' = $ErrCode - 'sourcepath' = $SourcePath - 'linenumber' = $LineNumber - 'columnnumber' = $ColumnNumber - } + 'type' = $Type + 'code' = $ErrCode + 'sourcepath' = $SourcePath + 'linenumber' = $LineNumber + 'columnnumber' = $ColumnNumber + } if ($AsOutput) { return $command } @@ -221,7 +246,8 @@ function Write-LogIssue { $foregroundColor = [System.ConsoleColor]::Red $backgroundColor = [System.ConsoleColor]::Black } - } else { + } + else { $foregroundColor = $host.PrivateData.WarningForegroundColor $backgroundColor = $host.PrivateData.WarningBackgroundColor if ($foregroundColor -isnot [System.ConsoleColor] -or $backgroundColor -isnot [System.ConsoleColor]) { @@ -231,4 +257,4 @@ function Write-LogIssue { } Write-Host $command -ForegroundColor $foregroundColor -BackgroundColor $backgroundColor -} \ No newline at end of file +} diff --git a/eng/common/pipeline-logging-functions.sh b/eng/common/pipeline-logging-functions.sh index 1c560a506..6a0b2255e 100755 --- a/eng/common/pipeline-logging-functions.sh +++ b/eng/common/pipeline-logging-functions.sh @@ -2,15 +2,19 @@ function Write-PipelineTelemetryError { local telemetry_category='' + local force=false local function_args=() local message='' while [[ $# -gt 0 ]]; do - opt="$(echo "${1/#--/-}" | awk '{print tolower($0)}')" + opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")" case "$opt" in -category|-c) telemetry_category=$2 shift ;; + -force|-f) + force=true + ;; -*) function_args+=("$1 $2") shift @@ -22,31 +26,29 @@ function Write-PipelineTelemetryError { shift done - if [[ "$ci" != true ]]; then + if [[ $force != true ]] && [[ "$ci" != true ]]; then echo "$message" >&2 return fi + if [[ $force == true ]]; then + function_args+=("-force") + fi message="(NETCORE_ENGINEERING_TELEMETRY=$telemetry_category) $message" function_args+=("$message") - - Write-PipelineTaskError $function_args + Write-PipelineTaskError ${function_args[@]} } function Write-PipelineTaskError { - if [[ "$ci" != true ]]; then - echo "$@" >&2 - return - fi - local message_type="error" local sourcepath='' local linenumber='' local columnnumber='' local error_code='' + local force=false while [[ $# -gt 0 ]]; do - opt="$(echo "${1/#--/-}" | awk '{print tolower($0)}')" + opt="$(echo "${1/#--/-}" | tr "[:upper:]" 
"[:lower:]")" case "$opt" in -type|-t) message_type=$2 @@ -68,6 +70,9 @@ function Write-PipelineTaskError { error_code=$2 shift ;; + -force|-f) + force=true + ;; *) break ;; @@ -76,6 +81,11 @@ function Write-PipelineTaskError { shift done + if [[ $force != true ]] && [[ "$ci" != true ]]; then + echo "$@" >&2 + return + fi + local message="##vso[task.logissue" message="$message type=$message_type" @@ -112,7 +122,7 @@ function Write-PipelineSetVariable { local is_multi_job_variable=true while [[ $# -gt 0 ]]; do - opt="$(echo "${1/#--/-}" | awk '{print tolower($0)}')" + opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")" case "$opt" in -name|-n) name=$2 @@ -154,7 +164,7 @@ function Write-PipelinePrependPath { local prepend_path='' while [[ $# -gt 0 ]]; do - opt="$(echo "${1/#--/-}" | awk '{print tolower($0)}')" + opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")" case "$opt" in -path|-p) prepend_path=$2 @@ -169,4 +179,28 @@ function Write-PipelinePrependPath { if [[ "$ci" == true ]]; then echo "##vso[task.prependpath]$prepend_path" fi -} \ No newline at end of file +} + +function Write-PipelineSetResult { + local result='' + local message='' + + while [[ $# -gt 0 ]]; do + opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")" + case "$opt" in + -result|-r) + result=$2 + shift + ;; + -message|-m) + message=$2 + shift + ;; + esac + shift + done + + if [[ "$ci" == true ]]; then + echo "##vso[task.complete result=$result;]$message" + fi +} diff --git a/eng/common/post-build/promote-build.ps1 b/eng/common/post-build/add-build-to-channel.ps1 similarity index 56% rename from eng/common/post-build/promote-build.ps1 rename to eng/common/post-build/add-build-to-channel.ps1 index e5ae85f25..49938f0c8 100644 --- a/eng/common/post-build/promote-build.ps1 +++ b/eng/common/post-build/add-build-to-channel.ps1 @@ -2,26 +2,26 @@ param( [Parameter(Mandatory=$true)][int] $BuildId, [Parameter(Mandatory=$true)][int] $ChannelId, [Parameter(Mandatory=$true)][string] $MaestroApiAccessToken, - [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = "https://maestro-prod.westus2.cloudapp.azure.com", - [Parameter(Mandatory=$false)][string] $MaestroApiVersion = "2019-01-16" + [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro.dot.net', + [Parameter(Mandatory=$false)][string] $MaestroApiVersion = '2019-01-16' ) -. $PSScriptRoot\post-build-utils.ps1 - try { + . $PSScriptRoot\post-build-utils.ps1 + # Check that the channel we are going to promote the build to exist $channelInfo = Get-MaestroChannel -ChannelId $ChannelId if (!$channelInfo) { - Write-Host "Channel with BAR ID $ChannelId was not found in BAR!" + Write-PipelineTelemetryCategory -Category 'PromoteBuild' -Message "Channel with BAR ID $ChannelId was not found in BAR!" ExitWithExitCode 1 } - # Get info about which channels the build has already been promoted to + # Get info about which channel(s) the build has already been promoted to $buildInfo = Get-MaestroBuild -BuildId $BuildId if (!$buildInfo) { - Write-Host "Build with BAR ID $BuildId was not found in BAR!" + Write-PipelineTelemetryError -Category 'PromoteBuild' -Message "Build with BAR ID $BuildId was not found in BAR!" ExitWithExitCode 1 } @@ -39,10 +39,10 @@ try { Assign-BuildToChannel -BuildId $BuildId -ChannelId $ChannelId - Write-Host "done." + Write-Host 'done.' 
} catch { - Write-Host "There was an error while trying to promote build '$BuildId' to channel '$ChannelId'" Write-Host $_ - Write-Host $_.ScriptStackTrace + Write-PipelineTelemetryError -Category 'PromoteBuild' -Message "There was an error while trying to promote build '$BuildId' to channel '$ChannelId'" + ExitWithExitCode 1 } diff --git a/eng/common/post-build/check-channel-consistency.ps1 b/eng/common/post-build/check-channel-consistency.ps1 new file mode 100644 index 000000000..63f3464c9 --- /dev/null +++ b/eng/common/post-build/check-channel-consistency.ps1 @@ -0,0 +1,40 @@ +param( + [Parameter(Mandatory=$true)][string] $PromoteToChannels, # List of channels that the build should be promoted to + [Parameter(Mandatory=$true)][array] $AvailableChannelIds # List of channel IDs available in the YAML implementation +) + +try { + . $PSScriptRoot\post-build-utils.ps1 + + if ($PromoteToChannels -eq "") { + Write-PipelineTaskError -Type 'warning' -Message "This build won't publish assets as it's not configured to any Maestro channel. If that wasn't intended use Darc to configure a default channel using add-default-channel for this branch or to promote it to a channel using add-build-to-channel. See https://github.com/dotnet/arcade/blob/master/Documentation/Darc.md#assigning-an-individual-build-to-a-channel for more info." + ExitWithExitCode 0 + } + + # Check that every channel that Maestro told to promote the build to + # is available in YAML + $PromoteToChannelsIds = $PromoteToChannels -split "\D" | Where-Object { $_ } + + $hasErrors = $false + + foreach ($id in $PromoteToChannelsIds) { + if (($id -ne 0) -and ($id -notin $AvailableChannelIds)) { + Write-PipelineTaskError -Message "Channel $id is not present in the post-build YAML configuration! This is an error scenario. Please contact @dnceng." + $hasErrors = $true + } + } + + # The `Write-PipelineTaskError` doesn't error the script and we might report several errors + # in the previous lines. The check below makes sure that we return an error state from the + # script if we reported any validation error + if ($hasErrors) { + ExitWithExitCode 1 + } + + Write-Host 'done.' +} +catch { + Write-Host $_ + Write-PipelineTelemetryError -Category 'CheckChannelConsistency' -Message "There was an error while trying to check consistency of Maestro default channels for the build and post-build YAML configuration." + ExitWithExitCode 1 +} diff --git a/eng/common/post-build/darc-gather-drop.ps1 b/eng/common/post-build/darc-gather-drop.ps1 deleted file mode 100644 index 89854d3c1..000000000 --- a/eng/common/post-build/darc-gather-drop.ps1 +++ /dev/null @@ -1,45 +0,0 @@ -param( - [Parameter(Mandatory=$true)][int] $BarBuildId, # ID of the build which assets should be downloaded - [Parameter(Mandatory=$true)][string] $DropLocation, # Where the assets should be downloaded to - [Parameter(Mandatory=$true)][string] $MaestroApiAccessToken, # Token used to access Maestro API - [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = "https://maestro-prod.westus2.cloudapp.azure.com", # Maestro API URL - [Parameter(Mandatory=$false)][string] $MaestroApiVersion = "2019-01-16" # Version of Maestro API to use -) - -. $PSScriptRoot\post-build-utils.ps1 - -try { - Write-Host "Installing DARC ..." - - . $PSScriptRoot\..\darc-init.ps1 - $exitCode = $LASTEXITCODE - - if ($exitCode -ne 0) { - Write-PipelineTaskError "Something failed while running 'darc-init.ps1'. Check for errors above. Exiting now..." - ExitWithExitCode $exitCode - } - - # For now, only use a dry run. 
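The channel-ID extraction in check-channel-consistency.ps1 above ($PromoteToChannels -split "\D") has a direct shell analogue; a bash sketch with an illustrative channel list:

#!/usr/bin/env bash
# Sketch: pull numeric channel IDs out of a free-form list by turning
# every non-digit run into a single space. The input is illustrative.
promote_to_channels='131;529'
for id in $(echo "$promote_to_channels" | tr -cs '0-9' ' '); do
  echo "channel id: $id"
done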
- # Ideally we would change darc to enable a quick request that - # would check whether the file exists that you can download it, - # and that it won't conflict with other files. - # https://github.com/dotnet/arcade/issues/3674 - # Right now we can't remove continue-on-error because we ocassionally will have - # dependencies that have no associated builds (e.g. an old dependency). - # We need to add an option to baseline specific dependencies away, or add them manually - # to the BAR. - darc gather-drop --non-shipping ` - --dry-run ` - --continue-on-error ` - --id $BarBuildId ` - --output-dir $DropLocation ` - --bar-uri $MaestroApiEndpoint ` - --password $MaestroApiAccessToken ` - --latest-location -} -catch { - Write-Host $_ - Write-Host $_.Exception - Write-Host $_.ScriptStackTrace - ExitWithExitCode 1 -} diff --git a/eng/common/post-build/nuget-validation.ps1 b/eng/common/post-build/nuget-validation.ps1 index 78ed0d540..dab3534ab 100644 --- a/eng/common/post-build/nuget-validation.ps1 +++ b/eng/common/post-build/nuget-validation.ps1 @@ -6,20 +6,19 @@ param( [Parameter(Mandatory=$true)][string] $ToolDestinationPath # Where the validation tool should be downloaded to ) -. $PSScriptRoot\post-build-utils.ps1 - try { - $url = "https://raw.githubusercontent.com/NuGet/NuGetGallery/jver-verify/src/VerifyMicrosoftPackage/verify.ps1" + . $PSScriptRoot\post-build-utils.ps1 + + $url = 'https://raw.githubusercontent.com/NuGet/NuGetGallery/3e25ad135146676bcab0050a516939d9958bfa5d/src/VerifyMicrosoftPackage/verify.ps1' - New-Item -ItemType "directory" -Path ${ToolDestinationPath} -Force + New-Item -ItemType 'directory' -Path ${ToolDestinationPath} -Force Invoke-WebRequest $url -OutFile ${ToolDestinationPath}\verify.ps1 & ${ToolDestinationPath}\verify.ps1 ${PackagesPath}\*.nupkg } catch { - Write-PipelineTaskError "NuGet package validation failed. Please check error logs." - Write-Host $_ Write-Host $_.ScriptStackTrace + Write-PipelineTelemetryError -Category 'NuGetValidation' -Message $_ ExitWithExitCode 1 } diff --git a/eng/common/post-build/post-build-utils.ps1 b/eng/common/post-build/post-build-utils.ps1 index 551ae113f..534f6988d 100644 --- a/eng/common/post-build/post-build-utils.ps1 +++ b/eng/common/post-build/post-build-utils.ps1 @@ -1,16 +1,17 @@ # Most of the functions in this file require the variables `MaestroApiEndPoint`, # `MaestroApiVersion` and `MaestroApiAccessToken` to be globally available. -$ErrorActionPreference = "Stop" +$ErrorActionPreference = 'Stop' Set-StrictMode -Version 2.0 # `tools.ps1` checks $ci to perform some actions. Since the post-build # scripts don't necessarily execute in the same agent that run the # build.ps1/sh script this variable isn't automatically set. $ci = $true +$disableConfigureToolsetImport = $true . 
$PSScriptRoot\..\tools.ps1 -function Create-MaestroApiRequestHeaders([string]$ContentType = "application/json") { +function Create-MaestroApiRequestHeaders([string]$ContentType = 'application/json') { Validate-MaestroVars $headers = New-Object 'System.Collections.Generic.Dictionary[[String],[String]]' @@ -50,40 +51,40 @@ function Get-MaestroSubscriptions([string]$SourceRepository, [int]$ChannelId) { return $result } -function Trigger-Subscription([string]$SubscriptionId) { +function Assign-BuildToChannel([int]$BuildId, [int]$ChannelId) { Validate-MaestroVars $apiHeaders = Create-MaestroApiRequestHeaders -AuthToken $MaestroApiAccessToken - $apiEndpoint = "$MaestroApiEndPoint/api/subscriptions/$SubscriptionId/trigger?api-version=$MaestroApiVersion" - Invoke-WebRequest -Uri $apiEndpoint -Headers $apiHeaders -Method Post | Out-Null + $apiEndpoint = "$MaestroApiEndPoint/api/channels/${ChannelId}/builds/${BuildId}?api-version=$MaestroApiVersion" + Invoke-WebRequest -Method Post -Uri $apiEndpoint -Headers $apiHeaders | Out-Null } -function Assign-BuildToChannel([int]$BuildId, [int]$ChannelId) { +function Trigger-Subscription([string]$SubscriptionId) { Validate-MaestroVars $apiHeaders = Create-MaestroApiRequestHeaders -AuthToken $MaestroApiAccessToken - $apiEndpoint = "$MaestroApiEndPoint/api/channels/${ChannelId}/builds/${BuildId}?api-version=$MaestroApiVersion" - Invoke-WebRequest -Method Post -Uri $apiEndpoint -Headers $apiHeaders | Out-Null + $apiEndpoint = "$MaestroApiEndPoint/api/subscriptions/$SubscriptionId/trigger?api-version=$MaestroApiVersion" + Invoke-WebRequest -Uri $apiEndpoint -Headers $apiHeaders -Method Post | Out-Null } function Validate-MaestroVars { try { - Get-Variable MaestroApiEndPoint -Scope Global | Out-Null - Get-Variable MaestroApiVersion -Scope Global | Out-Null - Get-Variable MaestroApiAccessToken -Scope Global | Out-Null + Get-Variable MaestroApiEndPoint | Out-Null + Get-Variable MaestroApiVersion | Out-Null + Get-Variable MaestroApiAccessToken | Out-Null - if (!($MaestroApiEndPoint -Match "^http[s]?://maestro-(int|prod).westus2.cloudapp.azure.com$")) { - Write-PipelineTaskError "MaestroApiEndPoint is not a valid Maestro URL. '$MaestroApiEndPoint'" + if (!($MaestroApiEndPoint -Match '^http[s]?://maestro-(int|prod).westus2.cloudapp.azure.com$')) { + Write-PipelineTelemetryError -Category 'MaestroVars' -Message "MaestroApiEndPoint is not a valid Maestro URL. '$MaestroApiEndPoint'" ExitWithExitCode 1 } - if (!($MaestroApiVersion -Match "^[0-9]{4}-[0-9]{2}-[0-9]{2}$")) { - Write-PipelineTaskError "MaestroApiVersion does not match a version string in the format yyyy-MM-DD. '$MaestroApiVersion'" + if (!($MaestroApiVersion -Match '^[0-9]{4}-[0-9]{2}-[0-9]{2}$')) { + Write-PipelineTelemetryError -Category 'MaestroVars' -Message "MaestroApiVersion does not match a version string in the format yyyy-MM-DD. '$MaestroApiVersion'" ExitWithExitCode 1 } } catch { - Write-PipelineTaskError "Error: Variables `MaestroApiEndPoint`, `MaestroApiVersion` and `MaestroApiAccessToken` are required while using this script." + Write-PipelineTelemetryError -Category 'MaestroVars' -Message 'Error: Variables `MaestroApiEndPoint`, `MaestroApiVersion` and `MaestroApiAccessToken` are required while using this script.' 
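For orientation, Assign-BuildToChannel above is one authenticated POST; a hedged curl sketch - the endpoint path and api-version come from this file, while the bearer-style Authorization header is only an assumption about what Create-MaestroApiRequestHeaders produces:

#!/usr/bin/env bash
# Sketch: the REST call behind Assign-BuildToChannel. The header shape
# is an assumption; IDs and token are placeholders.
MaestroApiEndPoint='https://maestro.dot.net'
MaestroApiVersion='2019-01-16'
ChannelId=131
BuildId=12345
curl -X POST \
  -H "Authorization: Bearer ${MAESTRO_API_TOKEN:-}" \
  "$MaestroApiEndPoint/api/channels/${ChannelId}/builds/${BuildId}?api-version=$MaestroApiVersion"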
Write-Host $_ ExitWithExitCode 1 } diff --git a/eng/common/post-build/publish-using-darc.ps1 b/eng/common/post-build/publish-using-darc.ps1 new file mode 100644 index 000000000..238945cb5 --- /dev/null +++ b/eng/common/post-build/publish-using-darc.ps1 @@ -0,0 +1,53 @@ +param( + [Parameter(Mandatory=$true)][int] $BuildId, + [Parameter(Mandatory=$true)][int] $PublishingInfraVersion, + [Parameter(Mandatory=$true)][string] $AzdoToken, + [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro.dot.net', + [Parameter(Mandatory=$true)][string] $WaitPublishingFinish, + [Parameter(Mandatory=$false)][string] $ArtifactsPublishingAdditionalParameters, + [Parameter(Mandatory=$false)][string] $SymbolPublishingAdditionalParameters +) + +try { + . $PSScriptRoot\post-build-utils.ps1 + + $darc = Get-Darc + + $optionalParams = [System.Collections.ArrayList]::new() + + if ("" -ne $ArtifactsPublishingAdditionalParameters) { + $optionalParams.Add("--artifact-publishing-parameters") | Out-Null + $optionalParams.Add($ArtifactsPublishingAdditionalParameters) | Out-Null + } + + if ("" -ne $SymbolPublishingAdditionalParameters) { + $optionalParams.Add("--symbol-publishing-parameters") | Out-Null + $optionalParams.Add($SymbolPublishingAdditionalParameters) | Out-Null + } + + if ("false" -eq $WaitPublishingFinish) { + $optionalParams.Add("--no-wait") | Out-Null + } + + & $darc add-build-to-channel ` + --id $buildId ` + --publishing-infra-version $PublishingInfraVersion ` + --default-channels ` + --source-branch main ` + --azdev-pat "$AzdoToken" ` + --bar-uri "$MaestroApiEndPoint" ` + --ci ` + @optionalParams + + if ($LastExitCode -ne 0) { + Write-Host "Problems using Darc to promote build ${buildId} to default channels. Stopping execution..." + exit 1 + } + + Write-Host 'done.' +} +catch { + Write-Host $_ + Write-PipelineTelemetryError -Category 'PromoteBuild' -Message "There was an error while trying to publish build '$BuildId' to default channels." + ExitWithExitCode 1 +} diff --git a/eng/common/post-build/setup-maestro-vars.ps1 b/eng/common/post-build/setup-maestro-vars.ps1 deleted file mode 100644 index d7f64dc63..000000000 --- a/eng/common/post-build/setup-maestro-vars.ps1 +++ /dev/null @@ -1,26 +0,0 @@ -param( - [Parameter(Mandatory=$true)][string] $ReleaseConfigsPath # Full path to ReleaseConfigs.txt asset -) - -. $PSScriptRoot\post-build-utils.ps1 - -try { - $Content = Get-Content $ReleaseConfigsPath - - $BarId = $Content | Select -Index 0 - - $Channels = "" - $Content | Select -Index 1 | ForEach-Object { $Channels += "$_ ," } - - $IsStableBuild = $Content | Select -Index 2 - - Write-PipelineSetVariable -Name 'BARBuildId' -Value $BarId - Write-PipelineSetVariable -Name 'InitialChannels' -Value "$Channels" - Write-PipelineSetVariable -Name 'IsStableBuild' -Value $IsStableBuild -} -catch { - Write-Host $_ - Write-Host $_.Exception - Write-Host $_.ScriptStackTrace - ExitWithExitCode 1 -} diff --git a/eng/common/post-build/sourcelink-validation.ps1 b/eng/common/post-build/sourcelink-validation.ps1 index bbfdacca1..4011d324e 100644 --- a/eng/common/post-build/sourcelink-validation.ps1 +++ b/eng/common/post-build/sourcelink-validation.ps1 @@ -14,11 +14,19 @@ param( $global:RepoFiles = @{} # Maximum number of jobs to run in parallel -$MaxParallelJobs = 6 +$MaxParallelJobs = 16 + +$MaxRetries = 5 +$RetryWaitTimeInSeconds = 30 # Wait time between check for system load $SecondsBetweenLoadChecks = 10 +if (!$InputPath -or !(Test-Path $InputPath)){ + Write-Host "No files to validate." 
+ ExitWithExitCode 0 +} + $ValidatePackage = { param( [string] $PackagePath # Full path to a Symbols.NuGet package @@ -29,14 +37,17 @@ $ValidatePackage = { # Ensure input file exist if (!(Test-Path $PackagePath)) { Write-Host "Input file does not exist: $PackagePath" - return 1 + return [pscustomobject]@{ + result = 1 + packagePath = $PackagePath + } } # Extensions for which we'll look for SourceLink information # For now we'll only care about Portable & Embedded PDBs - $RelevantExtensions = @(".dll", ".exe", ".pdb") + $RelevantExtensions = @('.dll', '.exe', '.pdb') - Write-Host -NoNewLine "Validating" ([System.IO.Path]::GetFileName($PackagePath)) "... " + Write-Host -NoNewLine 'Validating ' ([System.IO.Path]::GetFileName($PackagePath)) '...' $PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath) $ExtractPath = Join-Path -Path $using:ExtractPath -ChildPath $PackageId @@ -58,8 +69,11 @@ $ValidatePackage = { $TargetFile = Join-Path -Path $ExtractPath -ChildPath $FakeName # We ignore resource DLLs - if ($FileName.EndsWith(".resources.dll")) { - return + if ($FileName.EndsWith('.resources.dll')) { + return [pscustomobject]@{ + result = 0 + packagePath = $PackagePath + } } [System.IO.Compression.ZipFileExtensions]::ExtractToFile($_, $TargetFile, $true) @@ -91,36 +105,59 @@ $ValidatePackage = { $Status = 200 $Cache = $using:RepoFiles - if ( !($Cache.ContainsKey($FilePath)) ) { - try { - $Uri = $Link -as [System.URI] - - # Only GitHub links are valid - if ($Uri.AbsoluteURI -ne $null -and ($Uri.Host -match "github" -or $Uri.Host -match "githubusercontent")) { - $Status = (Invoke-WebRequest -Uri $Link -UseBasicParsing -Method HEAD -TimeoutSec 5).StatusCode + $attempts = 0 + + while ($attempts -lt $using:MaxRetries) { + if ( !($Cache.ContainsKey($FilePath)) ) { + try { + $Uri = $Link -as [System.URI] + + if ($Link -match "submodules") { + # Skip submodule links until sourcelink properly handles submodules + $Status = 200 + } + elseif ($Uri.AbsoluteURI -ne $null -and ($Uri.Host -match 'github' -or $Uri.Host -match 'githubusercontent')) { + # Only GitHub links are valid + $Status = (Invoke-WebRequest -Uri $Link -UseBasicParsing -Method HEAD -TimeoutSec 5).StatusCode + } + else { + # If it's not a github link, we want to break out of the loop and not retry. + $Status = 0 + $attempts = $using:MaxRetries + } } - else { + catch { + Write-Host $_ $Status = 0 } } - catch { - write-host $_ - $Status = 0 - } - } - if ($Status -ne 200) { - if ($NumFailedLinks -eq 0) { - if ($FailedFiles.Value -eq 0) { - Write-Host + if ($Status -ne 200) { + $attempts++ + + if ($attempts -lt $using:MaxRetries) + { + $attemptsLeft = $using:MaxRetries - $attempts + Write-Warning "Download failed, $attemptsLeft attempts remaining, will retry in $using:RetryWaitTimeInSeconds seconds" + Start-Sleep -Seconds $using:RetryWaitTimeInSeconds + } + else { + if ($NumFailedLinks -eq 0) { + if ($FailedFiles.Value -eq 0) { + Write-Host + } + + Write-Host "`tFile $RealPath has broken links:" + } + + Write-Host "`t`tFailed to retrieve $Link" + + $NumFailedLinks++ } - - Write-Host "`tFile $RealPath has broken links:" } - - Write-Host "`t`tFailed to retrieve $Link" - - $NumFailedLinks++ + else { + break + } } } } @@ -136,26 +173,45 @@ $ValidatePackage = { } } catch { - + Write-Host $_ } finally { $zip.Dispose() } if ($FailedFiles -eq 0) { - Write-Host "Passed." - return 0 + Write-Host 'Passed.' 
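Stripped of the caching and reporting details, the link-check retry loop introduced above reduces to the sketch below (shown outside a job, so without the $using: scope; the real code also treats a thrown web exception as status 0):

    $attempts = 0
    while ($attempts -lt $MaxRetries) {
      # A HEAD request is enough to verify the SourceLink target exists.
      $Status = (Invoke-WebRequest -Uri $Link -UseBasicParsing -Method HEAD -TimeoutSec 5).StatusCode
      if ($Status -eq 200) { break }
      $attempts++
      if ($attempts -lt $MaxRetries) {
        Start-Sleep -Seconds $RetryWaitTimeInSeconds
      }
    }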
+ return [pscustomobject]@{
+ result = 0
+ packagePath = $PackagePath
+ }
}
else {
- Write-Host "$PackagePath has broken SourceLink links."
- return 1
+ Write-PipelineTelemetryError -Category 'SourceLink' -Message "$PackagePath has broken SourceLink links."
+ return [pscustomobject]@{
+ result = 1
+ packagePath = $PackagePath
+ }
+ }
+}
+
+function CheckJobResult(
+ $result,
+ $packagePath,
+ [ref]$ValidationFailures,
+ [switch]$logErrors) {
+ if ($result -ne '0') {
+ if ($logErrors) {
+ Write-PipelineTelemetryError -Category 'SourceLink' -Message "$packagePath has broken SourceLink links."
+ }
+ $ValidationFailures.Value++
}
}
function ValidateSourceLinkLinks {
- if ($GHRepoName -ne "" -and !($GHRepoName -Match "^[^\s\/]+/[^\s\/]+$")) {
- if (!($GHRepoName -Match "^[^\s-]+-[^\s]+$")) {
- Write-PipelineTaskError "GHRepoName should be in the format <org>/<repo> or <org>-<repo>. '$GHRepoName'"
+ if ($GHRepoName -ne '' -and !($GHRepoName -Match '^[^\s\/]+/[^\s\/]+$')) {
+ if (!($GHRepoName -Match '^[^\s-]+-[^\s]+$')) {
+ Write-PipelineTelemetryError -Category 'SourceLink' -Message "GHRepoName should be in the format <org>/<repo> or <org>-<repo>. '$GHRepoName'"
ExitWithExitCode 1
}
else {
@@ -163,14 +219,14 @@ function ValidateSourceLinkLinks {
}
}
- if ($GHCommit -ne "" -and !($GHCommit -Match "^[0-9a-fA-F]{40}$")) {
- Write-PipelineTaskError "GHCommit should be a 40 chars hexadecimal string. '$GHCommit'"
+ if ($GHCommit -ne '' -and !($GHCommit -Match '^[0-9a-fA-F]{40}$')) {
+ Write-PipelineTelemetryError -Category 'SourceLink' -Message "GHCommit should be a 40 chars hexadecimal string. '$GHCommit'"
ExitWithExitCode 1
}
- if ($GHRepoName -ne "" -and $GHCommit -ne "") {
- $RepoTreeURL = -Join("http://api.github.com/repos/", $GHRepoName, "/git/trees/", $GHCommit, "?recursive=1")
- $CodeExtensions = @(".cs", ".vb", ".fs", ".fsi", ".fsx", ".fsscript")
+ if ($GHRepoName -ne '' -and $GHCommit -ne '') {
+ $RepoTreeURL = -Join('http://api.github.com/repos/', $GHRepoName, '/git/trees/', $GHCommit, '?recursive=1')
+ $CodeExtensions = @('.cs', '.vb', '.fs', '.fsi', '.fsx', '.fsscript')
try {
# Retrieve the list of files in the repo at that particular commit point and store them in the RepoFiles hash
@@ -188,17 +244,20 @@ function ValidateSourceLinkLinks {
Write-Host "Problems downloading the list of files from the repo. Url used: $RepoTreeURL . Execution will proceed without caching."
}
}
- elseif ($GHRepoName -ne "" -or $GHCommit -ne "") {
- Write-Host "For using the http caching mechanism both GHRepoName and GHCommit should be informed."
+ elseif ($GHRepoName -ne '' -or $GHCommit -ne '') {
+ Write-Host 'For using the http caching mechanism both GHRepoName and GHCommit should be informed.'
} if (Test-Path $ExtractPath) { Remove-Item $ExtractPath -Force -Recurse -ErrorAction SilentlyContinue } + $ValidationFailures = 0 + # Process each NuGet package in parallel Get-ChildItem "$InputPath\*.symbols.nupkg" | ForEach-Object { + Write-Host "Starting $($_.FullName)" Start-Job -ScriptBlock $ValidatePackage -ArgumentList $_.FullName | Out-Null $NumJobs = @(Get-Job -State 'Running').Count @@ -209,26 +268,25 @@ function ValidateSourceLinkLinks { } foreach ($Job in @(Get-Job -State 'Completed')) { - Receive-Job -Id $Job.Id + $jobResult = Wait-Job -Id $Job.Id | Receive-Job + CheckJobResult $jobResult.result $jobResult.packagePath ([ref]$ValidationFailures) -LogErrors Remove-Job -Id $Job.Id } } - $ValidationFailures = 0 foreach ($Job in @(Get-Job)) { $jobResult = Wait-Job -Id $Job.Id | Receive-Job - if ($jobResult -ne "0") { - $ValidationFailures++ - } + CheckJobResult $jobResult.result $jobResult.packagePath ([ref]$ValidationFailures) + Remove-Job -Id $Job.Id } if ($ValidationFailures -gt 0) { - Write-PipelineTaskError " $ValidationFailures package(s) failed validation." + Write-PipelineTelemetryError -Category 'SourceLink' -Message "$ValidationFailures package(s) failed validation." ExitWithExitCode 1 } } function InstallSourcelinkCli { - $sourcelinkCliPackageName = "sourcelink" + $sourcelinkCliPackageName = 'sourcelink' $dotnetRoot = InitializeDotNetCli -install:$true $dotnet = "$dotnetRoot\dotnet.exe" @@ -239,7 +297,7 @@ function InstallSourcelinkCli { } else { Write-Host "Installing SourceLink CLI version $sourcelinkCliVersion..." - Write-Host "You may need to restart your command window if this is the first dotnet tool you have installed." + Write-Host 'You may need to restart your command window if this is the first dotnet tool you have installed.' 
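Both this script and symbols-validation.ps1 below now share the same parallelism model: start one background job per package, cap the number of running jobs, and drain completed jobs as capacity frees up. In outline, with the variables defined above:

    Get-ChildItem "$InputPath\*.symbols.nupkg" | ForEach-Object {
      Start-Job -ScriptBlock $ValidatePackage -ArgumentList $_.FullName | Out-Null
      # Throttle: block while the job pool is full.
      while (@(Get-Job -State 'Running').Count -ge $MaxParallelJobs) {
        Start-Sleep -Seconds $SecondsBetweenLoadChecks
      }
      # Drain finished jobs so failures are counted incrementally.
      foreach ($Job in @(Get-Job -State 'Completed')) {
        $jobResult = Wait-Job -Id $Job.Id | Receive-Job
        CheckJobResult $jobResult.result $jobResult.packagePath ([ref]$ValidationFailures) -LogErrors
        Remove-Job -Id $Job.Id
      }
    }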
& "$dotnet" tool install $sourcelinkCliPackageName --version $sourcelinkCliVersion --verbosity "minimal" --global } } @@ -247,11 +305,15 @@ function InstallSourcelinkCli { try { InstallSourcelinkCli + foreach ($Job in @(Get-Job)) { + Remove-Job -Id $Job.Id + } + ValidateSourceLinkLinks } catch { - Write-Host $_ Write-Host $_.Exception Write-Host $_.ScriptStackTrace + Write-PipelineTelemetryError -Category 'SourceLink' -Message $_ ExitWithExitCode 1 } diff --git a/eng/common/post-build/symbols-validation.ps1 b/eng/common/post-build/symbols-validation.ps1 index 096ac321d..cd2181baf 100644 --- a/eng/common/post-build/symbols-validation.ps1 +++ b/eng/common/post-build/symbols-validation.ps1 @@ -1,127 +1,238 @@ param( - [Parameter(Mandatory=$true)][string] $InputPath, # Full path to directory where NuGet packages to be checked are stored - [Parameter(Mandatory=$true)][string] $ExtractPath, # Full path to directory where the packages will be extracted during validation - [Parameter(Mandatory=$true)][string] $DotnetSymbolVersion # Version of dotnet symbol to use + [Parameter(Mandatory = $true)][string] $InputPath, # Full path to directory where NuGet packages to be checked are stored + [Parameter(Mandatory = $true)][string] $ExtractPath, # Full path to directory where the packages will be extracted during validation + [Parameter(Mandatory = $true)][string] $DotnetSymbolVersion, # Version of dotnet symbol to use + [Parameter(Mandatory = $false)][switch] $CheckForWindowsPdbs, # If we should check for the existence of windows pdbs in addition to portable PDBs + [Parameter(Mandatory = $false)][switch] $ContinueOnError, # If we should keep checking symbols after an error + [Parameter(Mandatory = $false)][switch] $Clean, # Clean extracted symbols directory after checking symbols + [Parameter(Mandatory = $false)][string] $SymbolExclusionFile # Exclude the symbols in the file from publishing to symbol server ) -. $PSScriptRoot\post-build-utils.ps1 +. $PSScriptRoot\..\tools.ps1 +# Maximum number of jobs to run in parallel +$MaxParallelJobs = 16 -Add-Type -AssemblyName System.IO.Compression.FileSystem +# Max number of retries +$MaxRetry = 5 -function FirstMatchingSymbolDescriptionOrDefault { - param( - [string] $FullPath, # Full path to the module that has to be checked - [string] $TargetServerParam, # Parameter to pass to `Symbol Tool` indicating the server to lookup for symbols - [string] $SymbolsPath - ) - - $FileName = [System.IO.Path]::GetFileName($FullPath) - $Extension = [System.IO.Path]::GetExtension($FullPath) - - # Those below are potential symbol files that the `dotnet symbol` might - # return. Which one will be returned depend on the type of file we are - # checking and which type of file was uploaded. 
-
- # The file itself is returned
- $SymbolPath = $SymbolsPath + "\" + $FileName
+# Wait time between check for system load
+$SecondsBetweenLoadChecks = 10
- # PDB file for the module
- $PdbPath = $SymbolPath.Replace($Extension, ".pdb")
+# Set error codes
+Set-Variable -Name "ERROR_BADEXTRACT" -Option Constant -Value -1
+Set-Variable -Name "ERROR_FILEDOESNOTEXIST" -Option Constant -Value -2
- # PDB file for R2R module (created by crossgen)
- $NGenPdb = $SymbolPath.Replace($Extension, ".ni.pdb")
-
- # DBG file for a .so library
- $SODbg = $SymbolPath.Replace($Extension, ".so.dbg")
+$WindowsPdbVerificationParam = ""
+if ($CheckForWindowsPdbs) {
+ $WindowsPdbVerificationParam = "--windows-pdbs"
+}
- # DWARF file for a .dylib
- $DylibDwarf = $SymbolPath.Replace($Extension, ".dylib.dwarf")
-
- $dotnetSymbolExe = "$env:USERPROFILE\.dotnet\tools"
- $dotnetSymbolExe = Resolve-Path "$dotnetSymbolExe\dotnet-symbol.exe"
+$ExclusionSet = New-Object System.Collections.Generic.HashSet[string];
- & $dotnetSymbolExe --symbols --modules --windows-pdbs $TargetServerParam $FullPath -o $SymbolsPath | Out-Null
+if (!$InputPath -or !(Test-Path $InputPath)){
+ Write-Host "No symbols to validate."
+ ExitWithExitCode 0
+}
- if (Test-Path $PdbPath) {
- return "PDB"
- }
- elseif (Test-Path $NGenPdb) {
- return "NGen PDB"
- }
- elseif (Test-Path $SODbg) {
- return "DBG for SO"
- }
- elseif (Test-Path $DylibDwarf) {
- return "Dwarf for Dylib"
- }
- elseif (Test-Path $SymbolPath) {
- return "Module"
- }
- else {
- return $null
- }
+#Check if the path exists
+if ($SymbolExclusionFile -and (Test-Path $SymbolExclusionFile)){
+ [string[]]$Exclusions = Get-Content "$SymbolExclusionFile"
+ $Exclusions | foreach { if($_ -and $_.Trim()){$ExclusionSet.Add($_)} }
+}
+else{
+ Write-Host "Symbol Exclusion file does not exist. No symbols to exclude."
} -function CountMissingSymbols { +$CountMissingSymbols = { param( - [string] $PackagePath # Path to a NuGet package + [string] $PackagePath, # Path to a NuGet package + [string] $WindowsPdbVerificationParam # If we should check for the existence of windows pdbs in addition to portable PDBs ) + Add-Type -AssemblyName System.IO.Compression.FileSystem + + Write-Host "Validating $PackagePath " + # Ensure input file exist if (!(Test-Path $PackagePath)) { Write-PipelineTaskError "Input file does not exist: $PackagePath" - ExitWithExitCode 1 + return [pscustomobject]@{ + result = $using:ERROR_FILEDOESNOTEXIST + packagePath = $PackagePath + } } # Extensions for which we'll look for symbols - $RelevantExtensions = @(".dll", ".exe", ".so", ".dylib") + $RelevantExtensions = @('.dll', '.exe', '.so', '.dylib') # How many files are missing symbol information $MissingSymbols = 0 $PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath) $PackageGuid = New-Guid - $ExtractPath = Join-Path -Path $ExtractPath -ChildPath $PackageGuid - $SymbolsPath = Join-Path -Path $ExtractPath -ChildPath "Symbols" + $ExtractPath = Join-Path -Path $using:ExtractPath -ChildPath $PackageGuid + $SymbolsPath = Join-Path -Path $ExtractPath -ChildPath 'Symbols' - [System.IO.Compression.ZipFile]::ExtractToDirectory($PackagePath, $ExtractPath) + try { + [System.IO.Compression.ZipFile]::ExtractToDirectory($PackagePath, $ExtractPath) + } + catch { + Write-Host "Something went wrong extracting $PackagePath" + Write-Host $_ + return [pscustomobject]@{ + result = $using:ERROR_BADEXTRACT + packagePath = $PackagePath + } + } Get-ChildItem -Recurse $ExtractPath | - Where-Object {$RelevantExtensions -contains $_.Extension} | - ForEach-Object { - if ($_.FullName -Match "\\ref\\") { - Write-Host "`t Ignoring reference assembly file" $_.FullName - return + Where-Object { $RelevantExtensions -contains $_.Extension } | + ForEach-Object { + $FileName = $_.FullName + if ($FileName -Match '\\ref\\') { + Write-Host "`t Ignoring reference assembly file " $FileName + return + } + + $FirstMatchingSymbolDescriptionOrDefault = { + param( + [string] $FullPath, # Full path to the module that has to be checked + [string] $TargetServerParam, # Parameter to pass to `Symbol Tool` indicating the server to lookup for symbols + [string] $WindowsPdbVerificationParam, # Parameter to pass to potential check for windows-pdbs. + [string] $SymbolsPath + ) + + $FileName = [System.IO.Path]::GetFileName($FullPath) + $Extension = [System.IO.Path]::GetExtension($FullPath) + + # Those below are potential symbol files that the `dotnet symbol` might + # return. Which one will be returned depend on the type of file we are + # checking and which type of file was uploaded. 
+ + # The file itself is returned + $SymbolPath = $SymbolsPath + '\' + $FileName + + # PDB file for the module + $PdbPath = $SymbolPath.Replace($Extension, '.pdb') + + # PDB file for R2R module (created by crossgen) + $NGenPdb = $SymbolPath.Replace($Extension, '.ni.pdb') + + # DBG file for a .so library + $SODbg = $SymbolPath.Replace($Extension, '.so.dbg') + + # DWARF file for a .dylib + $DylibDwarf = $SymbolPath.Replace($Extension, '.dylib.dwarf') + + $dotnetSymbolExe = "$env:USERPROFILE\.dotnet\tools" + $dotnetSymbolExe = Resolve-Path "$dotnetSymbolExe\dotnet-symbol.exe" + + $totalRetries = 0 + + while ($totalRetries -lt $using:MaxRetry) { + + # Save the output and get diagnostic output + $output = & $dotnetSymbolExe --symbols --modules $WindowsPdbVerificationParam $TargetServerParam $FullPath -o $SymbolsPath --diagnostics | Out-String + + if ((Test-Path $PdbPath) -and (Test-path $SymbolPath)) { + return 'Module and PDB for Module' + } + elseif ((Test-Path $NGenPdb) -and (Test-Path $PdbPath) -and (Test-Path $SymbolPath)) { + return 'Dll, PDB and NGen PDB' + } + elseif ((Test-Path $SODbg) -and (Test-Path $SymbolPath)) { + return 'So and DBG for SO' + } + elseif ((Test-Path $DylibDwarf) -and (Test-Path $SymbolPath)) { + return 'Dylib and Dwarf for Dylib' + } + elseif (Test-Path $SymbolPath) { + return 'Module' + } + else + { + $totalRetries++ + } } + + return $null + } + + $FileRelativePath = $FileName.Replace("$ExtractPath\", "") + if (($($using:ExclusionSet) -ne $null) -and ($($using:ExclusionSet).Contains($FileRelativePath) -or ($($using:ExclusionSet).Contains($FileRelativePath.Replace("\", "/"))))){ + Write-Host "Skipping $FileName from symbol validation" + } + + else { + $FileGuid = New-Guid + $ExpandedSymbolsPath = Join-Path -Path $SymbolsPath -ChildPath $FileGuid - $SymbolsOnMSDL = FirstMatchingSymbolDescriptionOrDefault $_.FullName "--microsoft-symbol-server" $SymbolsPath - $SymbolsOnSymWeb = FirstMatchingSymbolDescriptionOrDefault $_.FullName "--internal-server" $SymbolsPath + $SymbolsOnMSDL = & $FirstMatchingSymbolDescriptionOrDefault ` + -FullPath $FileName ` + -TargetServerParam '--microsoft-symbol-server' ` + -SymbolsPath "$ExpandedSymbolsPath-msdl" ` + -WindowsPdbVerificationParam $WindowsPdbVerificationParam + $SymbolsOnSymWeb = & $FirstMatchingSymbolDescriptionOrDefault ` + -FullPath $FileName ` + -TargetServerParam '--internal-server' ` + -SymbolsPath "$ExpandedSymbolsPath-symweb" ` + -WindowsPdbVerificationParam $WindowsPdbVerificationParam - Write-Host -NoNewLine "`t Checking file" $_.FullName "... " + Write-Host -NoNewLine "`t Checking file " $FileName "... " if ($SymbolsOnMSDL -ne $null -and $SymbolsOnSymWeb -ne $null) { - Write-Host "Symbols found on MSDL (" $SymbolsOnMSDL ") and SymWeb (" $SymbolsOnSymWeb ")" + Write-Host "Symbols found on MSDL ($SymbolsOnMSDL) and SymWeb ($SymbolsOnSymWeb)" } else { $MissingSymbols++ if ($SymbolsOnMSDL -eq $null -and $SymbolsOnSymWeb -eq $null) { - Write-Host "No symbols found on MSDL or SymWeb!" + Write-Host 'No symbols found on MSDL or SymWeb!' } else { if ($SymbolsOnMSDL -eq $null) { - Write-Host "No symbols found on MSDL!" + Write-Host 'No symbols found on MSDL!' } else { - Write-Host "No symbols found on SymWeb!" + Write-Host 'No symbols found on SymWeb!' 
} } } } + } + + if ($using:Clean) { + Remove-Item $ExtractPath -Recurse -Force + } Pop-Location - return $MissingSymbols + return [pscustomobject]@{ + result = $MissingSymbols + packagePath = $PackagePath + } +} + +function CheckJobResult( + $result, + $packagePath, + [ref]$DupedSymbols, + [ref]$TotalFailures) { + if ($result -eq $ERROR_BADEXTRACT) { + Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "$packagePath has duplicated symbol files" + $DupedSymbols.Value++ + } + elseif ($result -eq $ERROR_FILEDOESNOTEXIST) { + Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "$packagePath does not exist" + $TotalFailures.Value++ + } + elseif ($result -gt '0') { + Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "Missing symbols for $result modules in the package $packagePath" + $TotalFailures.Value++ + } + else { + Write-Host "All symbols verified for package $packagePath" + } } function CheckSymbolsAvailable { @@ -129,38 +240,72 @@ function CheckSymbolsAvailable { Remove-Item $ExtractPath -Force -Recurse -ErrorAction SilentlyContinue } + $TotalPackages = 0 + $TotalFailures = 0 + $DupedSymbols = 0 + Get-ChildItem "$InputPath\*.nupkg" | ForEach-Object { $FileName = $_.Name - + $FullName = $_.FullName + # These packages from Arcade-Services include some native libraries that # our current symbol uploader can't handle. Below is a workaround until # we get issue: https://github.com/dotnet/arcade/issues/2457 sorted. - if ($FileName -Match "Microsoft\.DotNet\.Darc\.") { + if ($FileName -Match 'Microsoft\.DotNet\.Darc\.') { Write-Host "Ignoring Arcade-services file: $FileName" Write-Host return } - elseif ($FileName -Match "Microsoft\.DotNet\.Maestro\.Tasks\.") { + elseif ($FileName -Match 'Microsoft\.DotNet\.Maestro\.Tasks\.') { Write-Host "Ignoring Arcade-services file: $FileName" Write-Host return } - - Write-Host "Validating $FileName " - $Status = CountMissingSymbols "$InputPath\$FileName" - - if ($Status -ne 0) { - Write-PipelineTaskError "Missing symbols for $Status modules in the package $FileName" - ExitWithExitCode $exitCode + + $TotalPackages++ + + Start-Job -ScriptBlock $CountMissingSymbols -ArgumentList @($FullName,$WindowsPdbVerificationParam) | Out-Null + + $NumJobs = @(Get-Job -State 'Running').Count + + while ($NumJobs -ge $MaxParallelJobs) { + Write-Host "There are $NumJobs validation jobs running right now. Waiting $SecondsBetweenLoadChecks seconds to check again." + sleep $SecondsBetweenLoadChecks + $NumJobs = @(Get-Job -State 'Running').Count } + foreach ($Job in @(Get-Job -State 'Completed')) { + $jobResult = Wait-Job -Id $Job.Id | Receive-Job + CheckJobResult $jobResult.result $jobResult.packagePath ([ref]$DupedSymbols) ([ref]$TotalFailures) + Remove-Job -Id $Job.Id + } Write-Host } + + foreach ($Job in @(Get-Job)) { + $jobResult = Wait-Job -Id $Job.Id | Receive-Job + CheckJobResult $jobResult.result $jobResult.packagePath ([ref]$DupedSymbols) ([ref]$TotalFailures) + } + + if ($TotalFailures -gt 0 -or $DupedSymbols -gt 0) { + if ($TotalFailures -gt 0) { + Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "Symbols missing for $TotalFailures/$TotalPackages packages" + } + + if ($DupedSymbols -gt 0) { + Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "$DupedSymbols/$TotalPackages packages had duplicated symbol files and could not be extracted" + } + + ExitWithExitCode 1 + } + else { + Write-Host "All symbols validated!" 
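The key refactoring in both validators is that each job now returns a small object instead of a bare integer, so a failure can still be attributed to its package once the job is received. A self-contained toy version of that protocol (the job body and package name are placeholders):

    $exampleJob = {
      param([string] $PackagePath)
      $MissingSymbols = 0    # real validation work would set this (or an error constant)
      # Pair the result with the package it belongs to.
      [pscustomobject]@{
        result      = $MissingSymbols
        packagePath = $PackagePath
      }
    }
    $job = Start-Job -ScriptBlock $exampleJob -ArgumentList 'example.nupkg'
    $jobResult = Wait-Job -Id $job.Id | Receive-Job
    Write-Host "$($jobResult.packagePath): result $($jobResult.result)"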
+ }
}
function InstallDotnetSymbol {
- $dotnetSymbolPackageName = "dotnet-symbol"
+ $dotnetSymbolPackageName = 'dotnet-symbol'
$dotnetRoot = InitializeDotNetCli -install:$true
$dotnet = "$dotnetRoot\dotnet.exe"
@@ -171,19 +316,24 @@ function InstallDotnetSymbol {
}
else {
Write-Host "Installing dotnet-symbol version $dotnetSymbolVersion..."
- Write-Host "You may need to restart your command window if this is the first dotnet tool you have installed."
+ Write-Host 'You may need to restart your command window if this is the first dotnet tool you have installed.'
& "$dotnet" tool install $dotnetSymbolPackageName --version $dotnetSymbolVersion --verbosity "minimal" --global
}
}
try {
+ . $PSScriptRoot\post-build-utils.ps1
+
InstallDotnetSymbol
+ foreach ($Job in @(Get-Job)) {
+ Remove-Job -Id $Job.Id
+ }
+
CheckSymbolsAvailable
}
catch {
- Write-Host $_
- Write-Host $_.Exception
Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Category 'CheckSymbols' -Message $_
ExitWithExitCode 1
}
diff --git a/eng/common/post-build/trigger-subscriptions.ps1 b/eng/common/post-build/trigger-subscriptions.ps1
index 926d5b455..ac9a95778 100644
--- a/eng/common/post-build/trigger-subscriptions.ps1
+++ b/eng/common/post-build/trigger-subscriptions.ps1
@@ -2,56 +2,63 @@ param(
[Parameter(Mandatory=$true)][string] $SourceRepo,
[Parameter(Mandatory=$true)][int] $ChannelId,
[Parameter(Mandatory=$true)][string] $MaestroApiAccessToken,
- [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = "https://maestro-prod.westus2.cloudapp.azure.com",
- [Parameter(Mandatory=$false)][string] $MaestroApiVersion = "2019-01-16"
+ [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro.dot.net',
+ [Parameter(Mandatory=$false)][string] $MaestroApiVersion = '2019-01-16'
)
-. $PSScriptRoot\post-build-utils.ps1
+try {
+ . $PSScriptRoot\post-build-utils.ps1
-# Get all the $SourceRepo subscriptions
-$normalizedSourceRepo = $SourceRepo.Replace('dnceng@', '')
-$subscriptions = Get-MaestroSubscriptions -SourceRepository $normalizedSourceRepo -ChannelId $ChannelId
+ # Get all the $SourceRepo subscriptions
+ $normalizedSourceRepo = $SourceRepo.Replace('dnceng@', '')
+ $subscriptions = Get-MaestroSubscriptions -SourceRepository $normalizedSourceRepo -ChannelId $ChannelId
-if (!$subscriptions) {
- Write-Host "No subscriptions found for source repo '$normalizedSourceRepo' in channel '$ChannelId'"
- ExitWithExitCode 0
-}
+ if (!$subscriptions) {
+ Write-PipelineTelemetryError -Category 'TriggerSubscriptions' -Message "No subscriptions found for source repo '$normalizedSourceRepo' in channel '$ChannelId'"
+ ExitWithExitCode 0
+ }
-$subscriptionsToTrigger = New-Object System.Collections.Generic.List[string]
-$failedTriggeredSubscription = $false
+ $subscriptionsToTrigger = New-Object System.Collections.Generic.List[string]
+ $failedTriggeredSubscription = $false
-# Get all enabled subscriptions that need dependency flow on 'everyBuild'
-foreach ($subscription in $subscriptions) {
- if ($subscription.enabled -and $subscription.policy.updateFrequency -like 'everyBuild' -and $subscription.channel.id -eq $ChannelId) {
- Write-Host "Should trigger this subscription: $subscription.id"
- [void]$subscriptionsToTrigger.Add($subscription.id)
+ # Get all enabled subscriptions that need dependency flow on 'everyBuild'
+ foreach ($subscription in $subscriptions) {
+ if ($subscription.enabled -and $subscription.policy.updateFrequency -like 'everyBuild' -and $subscription.channel.id -eq $ChannelId) {
+ Write-Host "Should trigger this subscription: $($subscription.id)"
+ [void]$subscriptionsToTrigger.Add($subscription.id)
+ }
}
-}
-foreach ($subscriptionToTrigger in $subscriptionsToTrigger) {
- try {
- Write-Host "Triggering subscription '$subscriptionToTrigger'."
-
- Trigger-Subscription -SubscriptionId $subscriptionToTrigger
-
- Write-Host "done."
- }
- catch
- {
- Write-Host "There was an error while triggering subscription '$subscriptionToTrigger'"
- Write-Host $_
- Write-Host $_.ScriptStackTrace
- $failedTriggeredSubscription = $true
+ foreach ($subscriptionToTrigger in $subscriptionsToTrigger) {
+ try {
+ Write-Host "Triggering subscription '$subscriptionToTrigger'."
+
+ Trigger-Subscription -SubscriptionId $subscriptionToTrigger
+
+ Write-Host 'done.'
+ }
+ catch
+ {
+ Write-Host "There was an error while triggering subscription '$subscriptionToTrigger'"
+ Write-Host $_
+ Write-Host $_.ScriptStackTrace
+ $failedTriggeredSubscription = $true
+ }
}
-}
-if ($subscriptionsToTrigger.Count -eq 0) {
- Write-Host "No subscription matched source repo '$normalizedSourceRepo' and channel ID '$ChannelId'."
+ if ($subscriptionsToTrigger.Count -eq 0) {
+ Write-Host "No subscription matched source repo '$normalizedSourceRepo' and channel ID '$ChannelId'."
+ }
+ elseif ($failedTriggeredSubscription) {
+ Write-PipelineTelemetryError -Category 'TriggerSubscriptions' -Message 'At least one subscription failed to be triggered...'
+ ExitWithExitCode 1
+ }
+ else {
+ Write-Host 'All subscriptions were triggered successfully!'
+ }
}
-elseif ($failedTriggeredSubscription) {
- Write-Host "At least one subscription failed to be triggered..."
+catch {
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Category 'TriggerSubscriptions' -Message $_
ExitWithExitCode 1
}
-else {
- Write-Host "All subscriptions were triggered successfully!"
-}
diff --git a/eng/common/retain-build.ps1 b/eng/common/retain-build.ps1
new file mode 100644
index 000000000..e7ba975ad
--- /dev/null
+++ b/eng/common/retain-build.ps1
@@ -0,0 +1,45 @@
+
+Param(
+[Parameter(Mandatory=$true)][int] $buildId,
+[Parameter(Mandatory=$true)][string] $azdoOrgUri,
+[Parameter(Mandatory=$true)][string] $azdoProject,
+[Parameter(Mandatory=$true)][string] $token
+)
+
+$ErrorActionPreference = 'Stop'
+Set-StrictMode -Version 2.0
+
+function Get-AzDOHeaders(
+ [string] $token)
+{
+ $base64AuthInfo = [Convert]::ToBase64String([Text.Encoding]::ASCII.GetBytes(":${token}"))
+ $headers = @{"Authorization"="Basic $base64AuthInfo"}
+ return $headers
+}
+
+function Update-BuildRetention(
+ [string] $azdoOrgUri,
+ [string] $azdoProject,
+ [int] $buildId,
+ [string] $token)
+{
+ $headers = Get-AzDOHeaders -token $token
+ $requestBody = "{
+ `"keepForever`": `"true`"
+ }"
+
+ $requestUri = "${azdoOrgUri}/${azdoProject}/_apis/build/builds/${buildId}?api-version=6.0"
+ Write-Host "Attempting to retain build using the following URI: ${requestUri} ..."
+
+ try {
+ Invoke-RestMethod -Uri $requestUri -Method Patch -Body $requestBody -Header $headers -contentType "application/json"
+ Write-Host "Updated retention settings for build ${buildId}."
+ }
+ catch {
+ Write-Error "Failed to update retention settings for build: $($_.Exception.Response.StatusDescription)"
+ exit 1
+ }
+}
+
+Update-BuildRetention -azdoOrgUri $azdoOrgUri -azdoProject $azdoProject -buildId $buildId -token $token
+exit 0
diff --git a/eng/common/sdk-task.ps1 b/eng/common/sdk-task.ps1
index d0eec5163..73828dd30 100644
--- a/eng/common/sdk-task.ps1
+++ b/eng/common/sdk-task.ps1
@@ -1,8 +1,8 @@
[CmdletBinding(PositionalBinding=$false)]
Param(
- [string] $configuration = "Debug",
+ [string] $configuration = 'Debug',
[string] $task,
- [string] $verbosity = "minimal",
+ [string] $verbosity = 'minimal',
[string] $msbuildEngine = $null,
[switch] $restore,
[switch] $prepareMachine,
@@ -32,9 +32,9 @@ function Print-Usage() {
}
function Build([string]$target) {
- $logSuffix = if ($target -eq "Execute") { "" } else { ".$target" }
+ $logSuffix = if ($target -eq 'Execute') { '' } else { ".$target" }
$log = Join-Path $LogDir "$task$logSuffix.binlog"
- $outputPath = Join-Path $ToolsetDir "$task\\"
+ $outputPath = Join-Path $ToolsetDir "$task\"
MSBuild $taskProject `
/bl:$log `
@@ -42,37 +42,55 @@ function Build([string]$target) {
/p:Configuration=$configuration `
/p:RepoRoot=$RepoRoot `
/p:BaseIntermediateOutputPath=$outputPath `
+ /v:$verbosity `
@properties
}
try {
- if ($help -or (($null -ne $properties) -and ($properties.Contains("/help") -or $properties.Contains("/?")))) {
+ if ($help -or (($null -ne $properties) -and ($properties.Contains('/help') -or $properties.Contains('/?')))) {
Print-Usage
exit 0
}
if ($task -eq "") {
- Write-Host "Missing required parameter '-task <task>'" -ForegroundColor Red
+ Write-PipelineTelemetryError -Category 'Build' -Message "Missing required parameter '-task <task>'"
Print-Usage
ExitWithExitCode 1
}
+ if( $msbuildEngine -eq "vs") {
+ # Ensure desktop MSBuild is available for sdk tasks.
+ if( -not ($GlobalJson.tools.PSObject.Properties.Name -contains "vs" )) { + $GlobalJson.tools | Add-Member -Name "vs" -Value (ConvertFrom-Json "{ `"version`": `"16.5`" }") -MemberType NoteProperty + } + if( -not ($GlobalJson.tools.PSObject.Properties.Name -match "xcopy-msbuild" )) { + $GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value "17.8.1-2" -MemberType NoteProperty + } + if ($GlobalJson.tools."xcopy-msbuild".Trim() -ine "none") { + $xcopyMSBuildToolsFolder = InitializeXCopyMSBuild $GlobalJson.tools."xcopy-msbuild" -install $true + } + if ($xcopyMSBuildToolsFolder -eq $null) { + throw 'Unable to get xcopy downloadable version of msbuild' + } + + $global:_MSBuildExe = "$($xcopyMSBuildToolsFolder)\MSBuild\Current\Bin\MSBuild.exe" + } + $taskProject = GetSdkTaskProject $task if (!(Test-Path $taskProject)) { - Write-Host "Unknown task: $task" -ForegroundColor Red + Write-PipelineTelemetryError -Category 'Build' -Message "Unknown task: $task" ExitWithExitCode 1 } if ($restore) { - Build "Restore" + Build 'Restore' } - Build "Execute" + Build 'Execute' } catch { - Write-Host $_ - Write-Host $_.Exception Write-Host $_.ScriptStackTrace + Write-PipelineTelemetryError -Category 'Build' -Message $_ ExitWithExitCode 1 } diff --git a/eng/common/sdl/NuGet.config b/eng/common/sdl/NuGet.config index 0c5451c11..5bfbb02ef 100644 --- a/eng/common/sdl/NuGet.config +++ b/eng/common/sdl/NuGet.config @@ -5,8 +5,13 @@ - + + + + + + diff --git a/eng/common/sdl/configure-sdl-tool.ps1 b/eng/common/sdl/configure-sdl-tool.ps1 new file mode 100644 index 000000000..27f5a4115 --- /dev/null +++ b/eng/common/sdl/configure-sdl-tool.ps1 @@ -0,0 +1,130 @@ +Param( + [string] $GuardianCliLocation, + [string] $WorkingDirectory, + [string] $TargetDirectory, + [string] $GdnFolder, + # The list of Guardian tools to configure. For each object in the array: + # - If the item is a [hashtable], it must contain these entries: + # - Name = The tool name as Guardian knows it. + # - Scenario = (Optional) Scenario-specific name for this configuration entry. It must be unique + # among all tool entries with the same Name. + # - Args = (Optional) Array of Guardian tool configuration args, like '@("Target > C:\temp")' + # - If the item is a [string] $v, it is treated as '@{ Name="$v" }' + [object[]] $ToolsList, + [string] $GuardianLoggerLevel='Standard', + # Optional: Additional params to add to any tool using CredScan. + [string[]] $CrScanAdditionalRunConfigParams, + # Optional: Additional params to add to any tool using PoliCheck. + [string[]] $PoliCheckAdditionalRunConfigParams, + # Optional: Additional params to add to any tool using CodeQL/Semmle. + [string[]] $CodeQLAdditionalRunConfigParams, + # Optional: Additional params to add to any tool using Binskim. + [string[]] $BinskimAdditionalRunConfigParams +) + +$ErrorActionPreference = 'Stop' +Set-StrictMode -Version 2.0 +$disableConfigureToolsetImport = $true +$global:LASTEXITCODE = 0 + +try { + # `tools.ps1` checks $ci to perform some actions. Since the SDL + # scripts don't necessarily execute in the same agent that run the + # build.ps1/sh script this variable isn't automatically set. + $ci = $true + . $PSScriptRoot\..\tools.ps1 + + # Normalize tools list: all in [hashtable] form with defined values for each key. 
+ $ToolsList = $ToolsList | + ForEach-Object { + if ($_ -is [string]) { + $_ = @{ Name = $_ } + } + + if (-not ($_['Scenario'])) { $_.Scenario = "" } + if (-not ($_['Args'])) { $_.Args = @() } + $_ + } + + Write-Host "List of tools to configure:" + $ToolsList | ForEach-Object { $_ | Out-String | Write-Host } + + # We store config files in the r directory of .gdn + $gdnConfigPath = Join-Path $GdnFolder 'r' + $ValidPath = Test-Path $GuardianCliLocation + + if ($ValidPath -eq $False) + { + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Invalid Guardian CLI Location." + ExitWithExitCode 1 + } + + foreach ($tool in $ToolsList) { + # Put together the name and scenario to make a unique key. + $toolConfigName = $tool.Name + if ($tool.Scenario) { + $toolConfigName += "_" + $tool.Scenario + } + + Write-Host "=== Configuring $toolConfigName..." + + $gdnConfigFile = Join-Path $gdnConfigPath "$toolConfigName-configure.gdnconfig" + + # For some tools, add default and automatic args. + switch -Exact ($tool.Name) { + 'credscan' { + if ($targetDirectory) { + $tool.Args += "`"TargetDirectory < $TargetDirectory`"" + } + $tool.Args += "`"OutputType < pre`"" + $tool.Args += $CrScanAdditionalRunConfigParams + } + 'policheck' { + if ($targetDirectory) { + $tool.Args += "`"Target < $TargetDirectory`"" + } + $tool.Args += $PoliCheckAdditionalRunConfigParams + } + {$_ -in 'semmle', 'codeql'} { + if ($targetDirectory) { + $tool.Args += "`"SourceCodeDirectory < $TargetDirectory`"" + } + $tool.Args += $CodeQLAdditionalRunConfigParams + } + 'binskim' { + if ($targetDirectory) { + # Binskim crashes due to specific PDBs. GitHub issue: https://github.com/microsoft/binskim/issues/924. + # We are excluding all `_.pdb` files from the scan. + $tool.Args += "`"Target < $TargetDirectory\**;-:file|$TargetDirectory\**\_.pdb`"" + } + $tool.Args += $BinskimAdditionalRunConfigParams + } + } + + # Create variable pointing to the args array directly so we can use splat syntax later. + $toolArgs = $tool.Args + + # Configure the tool. If args array is provided or the current tool has some default arguments + # defined, add "--args" and splat each element on the end. Arg format is "{Arg id} < {Value}", + # one per parameter. 
Doc page for "guardian configure": + # https://dev.azure.com/securitytools/SecurityIntegration/_wiki/wikis/Guardian/1395/configure + Exec-BlockVerbosely { + & $GuardianCliLocation configure ` + --working-directory $WorkingDirectory ` + --tool $tool.Name ` + --output-path $gdnConfigFile ` + --logger-level $GuardianLoggerLevel ` + --noninteractive ` + --force ` + $(if ($toolArgs) { "--args" }) @toolArgs + Exit-IfNZEC "Sdl" + } + + Write-Host "Created '$toolConfigName' configuration file: $gdnConfigFile" + } +} +catch { + Write-Host $_.ScriptStackTrace + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_ + ExitWithExitCode 1 +} diff --git a/eng/common/sdl/execute-all-sdl-tools.ps1 b/eng/common/sdl/execute-all-sdl-tools.ps1 index 01799d63f..81ded5b7f 100644 --- a/eng/common/sdl/execute-all-sdl-tools.ps1 +++ b/eng/common/sdl/execute-all-sdl-tools.ps1 @@ -1,100 +1,165 @@ Param( - [string] $GuardianPackageName, # Required: the name of guardian CLI package (not needed if GuardianCliLocation is specified) - [string] $NugetPackageDirectory, # Required: directory where NuGet packages are installed (not needed if GuardianCliLocation is specified) - [string] $GuardianCliLocation, # Optional: Direct location of Guardian CLI executable if GuardianPackageName & NugetPackageDirectory are not specified - [string] $Repository=$env:BUILD_REPOSITORY_NAME, # Required: the name of the repository (e.g. dotnet/arcade) - [string] $BranchName=$env:BUILD_SOURCEBRANCH, # Optional: name of branch or version of gdn settings; defaults to master - [string] $SourceDirectory=$env:BUILD_SOURCESDIRECTORY, # Required: the directory where source files are located - [string] $ArtifactsDirectory = (Join-Path $env:BUILD_SOURCESDIRECTORY ("artifacts")), # Required: the directory where build artifacts are located - [string] $AzureDevOpsAccessToken, # Required: access token for dnceng; should be provided via KeyVault - [string[]] $SourceToolsList, # Optional: list of SDL tools to run on source code - [string[]] $ArtifactToolsList, # Optional: list of SDL tools to run on built artifacts - [bool] $TsaPublish=$False, # Optional: true will publish results to TSA; only set to true after onboarding to TSA; TSA is the automated framework used to upload test results as bugs. - [string] $TsaBranchName=$env:BUILD_SOURCEBRANCH, # Optional: required for TSA publish; defaults to $(Build.SourceBranchName); TSA is the automated framework used to upload test results as bugs. - [string] $TsaRepositoryName=$env:BUILD_REPOSITORY_NAME, # Optional: TSA repository name; will be generated automatically if not submitted; TSA is the automated framework used to upload test results as bugs. - [string] $BuildNumber=$env:BUILD_BUILDNUMBER, # Optional: required for TSA publish; defaults to $(Build.BuildNumber) - [bool] $UpdateBaseline=$False, # Optional: if true, will update the baseline in the repository; should only be run after fixing any issues which need to be fixed - [bool] $TsaOnboard=$False, # Optional: if true, will onboard the repository to TSA; should only be run once; TSA is the automated framework used to upload test results as bugs. - [string] $TsaInstanceUrl, # Optional: only needed if TsaOnboard or TsaPublish is true; the instance-url registered with TSA; TSA is the automated framework used to upload test results as bugs. - [string] $TsaCodebaseName, # Optional: only needed if TsaOnboard or TsaPublish is true; the name of the codebase registered with TSA; TSA is the automated framework used to upload test results as bugs. 
- [string] $TsaProjectName, # Optional: only needed if TsaOnboard or TsaPublish is true; the name of the project registered with TSA; TSA is the automated framework used to upload test results as bugs. - [string] $TsaNotificationEmail, # Optional: only needed if TsaOnboard is true; the email(s) which will receive notifications of TSA bug filings (e.g. alias@microsoft.com); TSA is the automated framework used to upload test results as bugs. - [string] $TsaCodebaseAdmin, # Optional: only needed if TsaOnboard is true; the aliases which are admins of the TSA codebase (e.g. DOMAIN\alias); TSA is the automated framework used to upload test results as bugs. - [string] $TsaBugAreaPath, # Optional: only needed if TsaOnboard is true; the area path where TSA will file bugs in AzDO; TSA is the automated framework used to upload test results as bugs. - [string] $TsaIterationPath, # Optional: only needed if TsaOnboard is true; the iteration path where TSA will file bugs in AzDO; TSA is the automated framework used to upload test results as bugs. - [string] $GuardianLoggerLevel="Standard", # Optional: the logger level for the Guardian CLI; options are Trace, Verbose, Standard, Warning, and Error - [string[]] $CrScanAdditionalRunConfigParams, # Optional: Additional Params to custom build a CredScan run config in the format @("xyz:abc","sdf:1") - [string[]] $PoliCheckAdditionalRunConfigParams # Optional: Additional Params to custom build a Policheck run config in the format @("xyz:abc","sdf:1") + [string] $GuardianPackageName, # Required: the name of guardian CLI package (not needed if GuardianCliLocation is specified) + [string] $NugetPackageDirectory, # Required: directory where NuGet packages are installed (not needed if GuardianCliLocation is specified) + [string] $GuardianCliLocation, # Optional: Direct location of Guardian CLI executable if GuardianPackageName & NugetPackageDirectory are not specified + [string] $Repository=$env:BUILD_REPOSITORY_NAME, # Required: the name of the repository (e.g. dotnet/arcade) + [string] $BranchName=$env:BUILD_SOURCEBRANCH, # Optional: name of branch or version of gdn settings; defaults to master + [string] $SourceDirectory=$env:BUILD_SOURCESDIRECTORY, # Required: the directory where source files are located + [string] $ArtifactsDirectory = (Join-Path $env:BUILD_ARTIFACTSTAGINGDIRECTORY ('artifacts')), # Required: the directory where build artifacts are located + + # Optional: list of SDL tools to run on source code. See 'configure-sdl-tool.ps1' for tools list + # format. + [object[]] $SourceToolsList, + # Optional: list of SDL tools to run on built artifacts. See 'configure-sdl-tool.ps1' for tools + # list format. + [object[]] $ArtifactToolsList, + # Optional: list of SDL tools to run without automatically specifying a target directory. See + # 'configure-sdl-tool.ps1' for tools list format. + [object[]] $CustomToolsList, + + [bool] $TsaPublish=$False, # Optional: true will publish results to TSA; only set to true after onboarding to TSA; TSA is the automated framework used to upload test results as bugs. + [string] $TsaBranchName=$env:BUILD_SOURCEBRANCH, # Optional: required for TSA publish; defaults to $(Build.SourceBranchName); TSA is the automated framework used to upload test results as bugs. + [string] $TsaRepositoryName=$env:BUILD_REPOSITORY_NAME, # Optional: TSA repository name; will be generated automatically if not submitted; TSA is the automated framework used to upload test results as bugs. 
+ [string] $BuildNumber=$env:BUILD_BUILDNUMBER, # Optional: required for TSA publish; defaults to $(Build.BuildNumber) + [bool] $UpdateBaseline=$False, # Optional: if true, will update the baseline in the repository; should only be run after fixing any issues which need to be fixed + [bool] $TsaOnboard=$False, # Optional: if true, will onboard the repository to TSA; should only be run once; TSA is the automated framework used to upload test results as bugs. + [string] $TsaInstanceUrl, # Optional: only needed if TsaOnboard or TsaPublish is true; the instance-url registered with TSA; TSA is the automated framework used to upload test results as bugs. + [string] $TsaCodebaseName, # Optional: only needed if TsaOnboard or TsaPublish is true; the name of the codebase registered with TSA; TSA is the automated framework used to upload test results as bugs. + [string] $TsaProjectName, # Optional: only needed if TsaOnboard or TsaPublish is true; the name of the project registered with TSA; TSA is the automated framework used to upload test results as bugs. + [string] $TsaNotificationEmail, # Optional: only needed if TsaOnboard is true; the email(s) which will receive notifications of TSA bug filings (e.g. alias@microsoft.com); TSA is the automated framework used to upload test results as bugs. + [string] $TsaCodebaseAdmin, # Optional: only needed if TsaOnboard is true; the aliases which are admins of the TSA codebase (e.g. DOMAIN\alias); TSA is the automated framework used to upload test results as bugs. + [string] $TsaBugAreaPath, # Optional: only needed if TsaOnboard is true; the area path where TSA will file bugs in AzDO; TSA is the automated framework used to upload test results as bugs. + [string] $TsaIterationPath, # Optional: only needed if TsaOnboard is true; the iteration path where TSA will file bugs in AzDO; TSA is the automated framework used to upload test results as bugs. + [string] $GuardianLoggerLevel='Standard', # Optional: the logger level for the Guardian CLI; options are Trace, Verbose, Standard, Warning, and Error + [string[]] $CrScanAdditionalRunConfigParams, # Optional: Additional Params to custom build a CredScan run config in the format @("xyz:abc","sdf:1") + [string[]] $PoliCheckAdditionalRunConfigParams, # Optional: Additional Params to custom build a Policheck run config in the format @("xyz:abc","sdf:1") + [string[]] $CodeQLAdditionalRunConfigParams, # Optional: Additional Params to custom build a Semmle/CodeQL run config in the format @("xyz < abc","sdf < 1") + [string[]] $BinskimAdditionalRunConfigParams, # Optional: Additional Params to custom build a Binskim run config in the format @("xyz < abc","sdf < 1") + [bool] $BreakOnFailure=$False # Optional: Fail the build if there were errors during the run ) -$ErrorActionPreference = "Stop" -Set-StrictMode -Version 2.0 -$LASTEXITCODE = 0 +try { + $ErrorActionPreference = 'Stop' + Set-StrictMode -Version 2.0 + $disableConfigureToolsetImport = $true + $global:LASTEXITCODE = 0 -#Replace repo names to the format of org/repo -if (!($Repository.contains('/'))) { - $RepoName = $Repository -replace '(.*?)-(.*)', '$1/$2'; -} -else{ - $RepoName = $Repository; -} + # `tools.ps1` checks $ci to perform some actions. Since the SDL + # scripts don't necessarily execute in the same agent that run the + # build.ps1/sh script this variable isn't automatically set. + $ci = $true + . 
$PSScriptRoot\..\tools.ps1 -if ($GuardianPackageName) { - $guardianCliLocation = Join-Path $NugetPackageDirectory (Join-Path $GuardianPackageName (Join-Path "tools" "guardian.cmd")) -} else { - $guardianCliLocation = $GuardianCliLocation -} + #Replace repo names to the format of org/repo + if (!($Repository.contains('/'))) { + $RepoName = $Repository -replace '(.*?)-(.*)', '$1/$2'; + } + else{ + $RepoName = $Repository; + } -$workingDirectory = (Split-Path $SourceDirectory -Parent) -$ValidPath = Test-Path $guardianCliLocation + if ($GuardianPackageName) { + $guardianCliLocation = Join-Path $NugetPackageDirectory (Join-Path $GuardianPackageName (Join-Path 'tools' 'guardian.cmd')) + } else { + $guardianCliLocation = $GuardianCliLocation + } -if ($ValidPath -eq $False) -{ - Write-Host "Invalid Guardian CLI Location." - exit 1 -} + $workingDirectory = (Split-Path $SourceDirectory -Parent) + $ValidPath = Test-Path $guardianCliLocation -& $(Join-Path $PSScriptRoot "init-sdl.ps1") -GuardianCliLocation $guardianCliLocation -Repository $RepoName -BranchName $BranchName -WorkingDirectory $workingDirectory -AzureDevOpsAccessToken $AzureDevOpsAccessToken -GuardianLoggerLevel $GuardianLoggerLevel -$gdnFolder = Join-Path $workingDirectory ".gdn" + if ($ValidPath -eq $False) + { + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message 'Invalid Guardian CLI Location.' + ExitWithExitCode 1 + } + + Exec-BlockVerbosely { + & $(Join-Path $PSScriptRoot 'init-sdl.ps1') -GuardianCliLocation $guardianCliLocation -Repository $RepoName -BranchName $BranchName -WorkingDirectory $workingDirectory -GuardianLoggerLevel $GuardianLoggerLevel + } + $gdnFolder = Join-Path $workingDirectory '.gdn' -if ($TsaOnboard) { - if ($TsaCodebaseName -and $TsaNotificationEmail -and $TsaCodebaseAdmin -and $TsaBugAreaPath) { - Write-Host "$guardianCliLocation tsa-onboard --codebase-name `"$TsaCodebaseName`" --notification-alias `"$TsaNotificationEmail`" --codebase-admin `"$TsaCodebaseAdmin`" --instance-url `"$TsaInstanceUrl`" --project-name `"$TsaProjectName`" --area-path `"$TsaBugAreaPath`" --iteration-path `"$TsaIterationPath`" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel" - & $guardianCliLocation tsa-onboard --codebase-name "$TsaCodebaseName" --notification-alias "$TsaNotificationEmail" --codebase-admin "$TsaCodebaseAdmin" --instance-url "$TsaInstanceUrl" --project-name "$TsaProjectName" --area-path "$TsaBugAreaPath" --iteration-path "$TsaIterationPath" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel - if ($LASTEXITCODE -ne 0) { - Write-Host "Guardian tsa-onboard failed with exit code $LASTEXITCODE." - exit $LASTEXITCODE + if ($TsaOnboard) { + if ($TsaCodebaseName -and $TsaNotificationEmail -and $TsaCodebaseAdmin -and $TsaBugAreaPath) { + Exec-BlockVerbosely { + & $guardianCliLocation tsa-onboard --codebase-name "$TsaCodebaseName" --notification-alias "$TsaNotificationEmail" --codebase-admin "$TsaCodebaseAdmin" --instance-url "$TsaInstanceUrl" --project-name "$TsaProjectName" --area-path "$TsaBugAreaPath" --iteration-path "$TsaIterationPath" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel + } + if ($LASTEXITCODE -ne 0) { + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian tsa-onboard failed with exit code $LASTEXITCODE." 
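Every Guardian invocation in these scripts now follows the same wrapper pattern: run the command through Exec-BlockVerbosely so it is echoed before execution, then turn a non-zero native exit code into a telemetry error. An illustrative instance, with arguments borrowed from init-sdl.ps1:

    Exec-BlockVerbosely {
      & $guardianCliLocation init --working-directory $workingDirectory --logger-level $GuardianLoggerLevel
      # Exit-IfNZEC raises a telemetry error and exits if $LASTEXITCODE is non-zero.
      Exit-IfNZEC 'Sdl'
    }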
+ ExitWithExitCode $LASTEXITCODE + } + } else { + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message 'Could not onboard to TSA -- not all required values ($TsaCodebaseName, $TsaNotificationEmail, $TsaCodebaseAdmin, $TsaBugAreaPath) were specified.' + ExitWithExitCode 1 } - } else { - Write-Host "Could not onboard to TSA -- not all required values ($$TsaCodebaseName, $$TsaNotificationEmail, $$TsaCodebaseAdmin, $$TsaBugAreaPath) were specified." - exit 1 } -} -if ($ArtifactToolsList -and $ArtifactToolsList.Count -gt 0) { - & $(Join-Path $PSScriptRoot "run-sdl.ps1") -GuardianCliLocation $guardianCliLocation -WorkingDirectory $workingDirectory -TargetDirectory $ArtifactsDirectory -GdnFolder $gdnFolder -ToolsList $ArtifactToolsList -AzureDevOpsAccessToken $AzureDevOpsAccessToken -UpdateBaseline $UpdateBaseline -GuardianLoggerLevel $GuardianLoggerLevel -CrScanAdditionalRunConfigParams $CrScanAdditionalRunConfigParams -PoliCheckAdditionalRunConfigParams $PoliCheckAdditionalRunConfigParams -} -if ($SourceToolsList -and $SourceToolsList.Count -gt 0) { - & $(Join-Path $PSScriptRoot "run-sdl.ps1") -GuardianCliLocation $guardianCliLocation -WorkingDirectory $workingDirectory -TargetDirectory $SourceDirectory -GdnFolder $gdnFolder -ToolsList $SourceToolsList -AzureDevOpsAccessToken $AzureDevOpsAccessToken -UpdateBaseline $UpdateBaseline -GuardianLoggerLevel $GuardianLoggerLevel -CrScanAdditionalRunConfigParams $CrScanAdditionalRunConfigParams -PoliCheckAdditionalRunConfigParams $PoliCheckAdditionalRunConfigParams -} + # Configure a list of tools with a default target directory. Populates the ".gdn/r" directory. + function Configure-ToolsList([object[]] $tools, [string] $targetDirectory) { + if ($tools -and $tools.Count -gt 0) { + Exec-BlockVerbosely { + & $(Join-Path $PSScriptRoot 'configure-sdl-tool.ps1') ` + -GuardianCliLocation $guardianCliLocation ` + -WorkingDirectory $workingDirectory ` + -TargetDirectory $targetDirectory ` + -GdnFolder $gdnFolder ` + -ToolsList $tools ` + -GuardianLoggerLevel $GuardianLoggerLevel ` + -CrScanAdditionalRunConfigParams $CrScanAdditionalRunConfigParams ` + -PoliCheckAdditionalRunConfigParams $PoliCheckAdditionalRunConfigParams ` + -CodeQLAdditionalRunConfigParams $CodeQLAdditionalRunConfigParams ` + -BinskimAdditionalRunConfigParams $BinskimAdditionalRunConfigParams + if ($BreakOnFailure) { + Exit-IfNZEC "Sdl" + } + } + } + } -if ($UpdateBaseline) { - & (Join-Path $PSScriptRoot "push-gdn.ps1") -Repository $RepoName -BranchName $BranchName -GdnFolder $GdnFolder -AzureDevOpsAccessToken $AzureDevOpsAccessToken -PushReason "Update baseline" -} + # Configure Artifact and Source tools with default Target directories. + Configure-ToolsList $ArtifactToolsList $ArtifactsDirectory + Configure-ToolsList $SourceToolsList $SourceDirectory + # Configure custom tools with no default Target directory. + Configure-ToolsList $CustomToolsList $null -if ($TsaPublish) { - if ($TsaBranchName -and $BuildNumber) { - if (-not $TsaRepositoryName) { - $TsaRepositoryName = "$($Repository)-$($BranchName)" + # At this point, all tools are configured in the ".gdn" directory. Run them all in a single call. + # (If we used "run" multiple times, each run would overwrite data from earlier runs.) 
+ Exec-BlockVerbosely { + & $(Join-Path $PSScriptRoot 'run-sdl.ps1') ` + -GuardianCliLocation $guardianCliLocation ` + -WorkingDirectory $SourceDirectory ` + -UpdateBaseline $UpdateBaseline ` + -GdnFolder $gdnFolder + } + + if ($TsaPublish) { + if ($TsaBranchName -and $BuildNumber) { + if (-not $TsaRepositoryName) { + $TsaRepositoryName = "$($Repository)-$($BranchName)" + } + Exec-BlockVerbosely { + & $guardianCliLocation tsa-publish --all-tools --repository-name "$TsaRepositoryName" --branch-name "$TsaBranchName" --build-number "$BuildNumber" --onboard $True --codebase-name "$TsaCodebaseName" --notification-alias "$TsaNotificationEmail" --codebase-admin "$TsaCodebaseAdmin" --instance-url "$TsaInstanceUrl" --project-name "$TsaProjectName" --area-path "$TsaBugAreaPath" --iteration-path "$TsaIterationPath" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel + } + if ($LASTEXITCODE -ne 0) { + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian tsa-publish failed with exit code $LASTEXITCODE." + ExitWithExitCode $LASTEXITCODE + } + } else { + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message 'Could not publish to TSA -- not all required values ($TsaBranchName, $BuildNumber) were specified.' + ExitWithExitCode 1 } - Write-Host "$guardianCliLocation tsa-publish --all-tools --repository-name `"$TsaRepositoryName`" --branch-name `"$TsaBranchName`" --build-number `"$BuildNumber`" --codebase-name `"$TsaCodebaseName`" --notification-alias `"$TsaNotificationEmail`" --codebase-admin `"$TsaCodebaseAdmin`" --instance-url `"$TsaInstanceUrl`" --project-name `"$TsaProjectName`" --area-path `"$TsaBugAreaPath`" --iteration-path `"$TsaIterationPath`" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel" - & $guardianCliLocation tsa-publish --all-tools --repository-name "$TsaRepositoryName" --branch-name "$TsaBranchName" --build-number "$BuildNumber" --onboard $True --codebase-name "$TsaCodebaseName" --notification-alias "$TsaNotificationEmail" --codebase-admin "$TsaCodebaseAdmin" --instance-url "$TsaInstanceUrl" --project-name "$TsaProjectName" --area-path "$TsaBugAreaPath" --iteration-path "$TsaIterationPath" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel - if ($LASTEXITCODE -ne 0) { - Write-Host "Guardian tsa-publish failed with exit code $LASTEXITCODE." - exit $LASTEXITCODE + } + + if ($BreakOnFailure) { + Write-Host "Failing the build in case of breaking results..." + Exec-BlockVerbosely { + & $guardianCliLocation break --working-directory $workingDirectory --logger-level $GuardianLoggerLevel } } else { - Write-Host "Could not publish to TSA -- not all required values ($$TsaBranchName, $$BuildNumber) were specified." - exit 1 + Write-Host "Letting the build pass even if there were breaking results..." } } +catch { + Write-Host $_.ScriptStackTrace + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_ + exit 1 +} diff --git a/eng/common/sdl/extract-artifact-archives.ps1 b/eng/common/sdl/extract-artifact-archives.ps1 new file mode 100644 index 000000000..68da4fbf2 --- /dev/null +++ b/eng/common/sdl/extract-artifact-archives.ps1 @@ -0,0 +1,63 @@ +# This script looks for each archive file in a directory and extracts it into the target directory. +# For example, the file "$InputPath/bin.tar.gz" extracts to "$ExtractPath/bin.tar.gz.extracted/**". +# Uses the "tar" utility added to Windows 10 / Windows 2019 that supports tar.gz and zip. +param( + # Full path to directory where archives are stored. 
+ [Parameter(Mandatory=$true)][string] $InputPath, + # Full path to directory to extract archives into. May be the same as $InputPath. + [Parameter(Mandatory=$true)][string] $ExtractPath +) + +$ErrorActionPreference = 'Stop' +Set-StrictMode -Version 2.0 + +$disableConfigureToolsetImport = $true + +try { + # `tools.ps1` checks $ci to perform some actions. Since the SDL + # scripts don't necessarily execute in the same agent that run the + # build.ps1/sh script this variable isn't automatically set. + $ci = $true + . $PSScriptRoot\..\tools.ps1 + + Measure-Command { + $jobs = @() + + # Find archive files for non-Windows and Windows builds. + $archiveFiles = @( + Get-ChildItem (Join-Path $InputPath "*.tar.gz") + Get-ChildItem (Join-Path $InputPath "*.zip") + ) + + foreach ($targzFile in $archiveFiles) { + $jobs += Start-Job -ScriptBlock { + $file = $using:targzFile + $fileName = [System.IO.Path]::GetFileName($file) + $extractDir = Join-Path $using:ExtractPath "$fileName.extracted" + + New-Item $extractDir -ItemType Directory -Force | Out-Null + + Write-Host "Extracting '$file' to '$extractDir'..." + + # Pipe errors to stdout to prevent PowerShell detecting them and quitting the job early. + # This type of quit skips the catch, so we wouldn't be able to tell which file triggered the + # error. Save output so it can be stored in the exception string along with context. + $output = tar -xf $file -C $extractDir 2>&1 + # Handle NZEC manually rather than using Exit-IfNZEC: we are in a background job, so we + # don't have access to the outer scope. + if ($LASTEXITCODE -ne 0) { + throw "Error extracting '$file': non-zero exit code ($LASTEXITCODE). Output: '$output'" + } + + Write-Host "Extracted to $extractDir" + } + } + + Receive-Job $jobs -Wait + } +} +catch { + Write-Host $_ + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_ + ExitWithExitCode 1 +} diff --git a/eng/common/sdl/extract-artifact-packages.ps1 b/eng/common/sdl/extract-artifact-packages.ps1 index 6e6825013..f031ed5b2 100644 --- a/eng/common/sdl/extract-artifact-packages.ps1 +++ b/eng/common/sdl/extract-artifact-packages.ps1 @@ -3,54 +3,12 @@ param( [Parameter(Mandatory=$true)][string] $ExtractPath # Full path to directory where the packages will be extracted ) -$ErrorActionPreference = "Stop" +$ErrorActionPreference = 'Stop' Set-StrictMode -Version 2.0 -# `tools.ps1` checks $ci to perform some actions. Since the post-build -# scripts don't necessarily execute in the same agent that run the -# build.ps1/sh script this variable isn't automatically set. -$ci = $true -. $PSScriptRoot\..\tools.ps1 +$disableConfigureToolsetImport = $true -$ExtractPackage = { - param( - [string] $PackagePath # Full path to a NuGet package - ) - - if (!(Test-Path $PackagePath)) { - Write-PipelineTaskError "Input file does not exist: $PackagePath" - ExitWithExitCode 1 - } - - $RelevantExtensions = @(".dll", ".exe", ".pdb") - Write-Host -NoNewLine "Extracting" ([System.IO.Path]::GetFileName($PackagePath)) "... 
" - - $PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath) - $ExtractPath = Join-Path -Path $using:ExtractPath -ChildPath $PackageId - - Add-Type -AssemblyName System.IO.Compression.FileSystem - - [System.IO.Directory]::CreateDirectory($ExtractPath); - - try { - $zip = [System.IO.Compression.ZipFile]::OpenRead($PackagePath) - - $zip.Entries | - Where-Object {$RelevantExtensions -contains [System.IO.Path]::GetExtension($_.Name)} | - ForEach-Object { - $TargetFile = Join-Path -Path $ExtractPath -ChildPath $_.Name - - [System.IO.Compression.ZipFileExtensions]::ExtractToFile($_, $TargetFile, $true) - } - } - catch { - - } - finally { - $zip.Dispose() - } - } - function ExtractArtifacts { +function ExtractArtifacts { if (!(Test-Path $InputPath)) { Write-Host "Input Path does not exist: $InputPath" ExitWithExitCode 0 @@ -67,11 +25,58 @@ $ExtractPackage = { } try { + # `tools.ps1` checks $ci to perform some actions. Since the SDL + # scripts don't necessarily execute in the same agent that run the + # build.ps1/sh script this variable isn't automatically set. + $ci = $true + . $PSScriptRoot\..\tools.ps1 + + $ExtractPackage = { + param( + [string] $PackagePath # Full path to a NuGet package + ) + + if (!(Test-Path $PackagePath)) { + Write-PipelineTelemetryError -Category 'Build' -Message "Input file does not exist: $PackagePath" + ExitWithExitCode 1 + } + + $RelevantExtensions = @('.dll', '.exe', '.pdb') + Write-Host -NoNewLine 'Extracting ' ([System.IO.Path]::GetFileName($PackagePath)) '...' + + $PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath) + $ExtractPath = Join-Path -Path $using:ExtractPath -ChildPath $PackageId + + Add-Type -AssemblyName System.IO.Compression.FileSystem + + [System.IO.Directory]::CreateDirectory($ExtractPath); + + try { + $zip = [System.IO.Compression.ZipFile]::OpenRead($PackagePath) + + $zip.Entries | + Where-Object {$RelevantExtensions -contains [System.IO.Path]::GetExtension($_.Name)} | + ForEach-Object { + $TargetPath = Join-Path -Path $ExtractPath -ChildPath (Split-Path -Path $_.FullName) + [System.IO.Directory]::CreateDirectory($TargetPath); + + $TargetFile = Join-Path -Path $ExtractPath -ChildPath $_.FullName + [System.IO.Compression.ZipFileExtensions]::ExtractToFile($_, $TargetFile) + } + } + catch { + Write-Host $_ + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_ + ExitWithExitCode 1 + } + finally { + $zip.Dispose() + } + } Measure-Command { ExtractArtifacts } } catch { Write-Host $_ - Write-Host $_.Exception - Write-Host $_.ScriptStackTrace + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_ ExitWithExitCode 1 } diff --git a/eng/common/sdl/init-sdl.ps1 b/eng/common/sdl/init-sdl.ps1 index c737eb0e7..588ff8e22 100644 --- a/eng/common/sdl/init-sdl.ps1 +++ b/eng/common/sdl/init-sdl.ps1 @@ -1,51 +1,47 @@ Param( [string] $GuardianCliLocation, [string] $Repository, - [string] $BranchName="master", + [string] $BranchName='master', [string] $WorkingDirectory, - [string] $AzureDevOpsAccessToken, - [string] $GuardianLoggerLevel="Standard" + [string] $GuardianLoggerLevel='Standard' ) -$ErrorActionPreference = "Stop" +$ErrorActionPreference = 'Stop' Set-StrictMode -Version 2.0 -$LASTEXITCODE = 0 +$disableConfigureToolsetImport = $true +$global:LASTEXITCODE = 0 + +# `tools.ps1` checks $ci to perform some actions. Since the SDL +# scripts don't necessarily execute in the same agent that run the +# build.ps1/sh script this variable isn't automatically set. +$ci = $true +. 
$PSScriptRoot\..\tools.ps1 # Don't display the console progress UI - it's a huge perf hit $ProgressPreference = 'SilentlyContinue' -# Construct basic auth from AzDO access token; construct URI to the repository's gdn folder stored in that repository; construct location of zip file -$encodedPat = [Convert]::ToBase64String([System.Text.Encoding]::ASCII.GetBytes(":$AzureDevOpsAccessToken")) -$escapedRepository = [Uri]::EscapeDataString("/$Repository/$BranchName/.gdn") -$uri = "https://dev.azure.com/dnceng/internal/_apis/git/repositories/sdl-tool-cfg/Items?path=$escapedRepository&versionDescriptor[versionOptions]=0&`$format=zip&api-version=5.0-preview.1" -$zipFile = "$WorkingDirectory/gdn.zip" - Add-Type -AssemblyName System.IO.Compression.FileSystem -$gdnFolder = (Join-Path $WorkingDirectory ".gdn") -Try -{ - # We try to download the zip; if the request fails (e.g. the file doesn't exist), we catch it and init guardian instead - Write-Host "Downloading gdn folder from internal config repostiory..." - Invoke-WebRequest -Headers @{ "Accept"="application/zip"; "Authorization"="Basic $encodedPat" } -Uri $uri -OutFile $zipFile - if (Test-Path $gdnFolder) { - # Remove the gdn folder if it exists (it shouldn't unless there's too much caching; this is just in case) - Remove-Item -Force -Recurse $gdnFolder - } - [System.IO.Compression.ZipFile]::ExtractToDirectory($zipFile, $WorkingDirectory) - Write-Host $gdnFolder -} Catch [System.Net.WebException] { + +try { # if the folder does not exist, we'll do a guardian init and push it to the remote repository - Write-Host "Initializing Guardian..." + Write-Host 'Initializing Guardian...' Write-Host "$GuardianCliLocation init --working-directory $WorkingDirectory --logger-level $GuardianLoggerLevel" & $GuardianCliLocation init --working-directory $WorkingDirectory --logger-level $GuardianLoggerLevel if ($LASTEXITCODE -ne 0) { - Write-Error "Guardian init failed with exit code $LASTEXITCODE." + Write-PipelineTelemetryError -Force -Category 'Build' -Message "Guardian init failed with exit code $LASTEXITCODE." + ExitWithExitCode $LASTEXITCODE } # We create the mainbaseline so it can be edited later Write-Host "$GuardianCliLocation baseline --working-directory $WorkingDirectory --name mainbaseline" & $GuardianCliLocation baseline --working-directory $WorkingDirectory --name mainbaseline if ($LASTEXITCODE -ne 0) { - Write-Error "Guardian baseline failed with exit code $LASTEXITCODE." + Write-PipelineTelemetryError -Force -Category 'Build' -Message "Guardian baseline failed with exit code $LASTEXITCODE." 
+ ExitWithExitCode $LASTEXITCODE } - & $(Join-Path $PSScriptRoot "push-gdn.ps1") -Repository $Repository -BranchName $BranchName -GdnFolder $gdnFolder -AzureDevOpsAccessToken $AzureDevOpsAccessToken -PushReason "Initialize gdn folder" -} \ No newline at end of file + ExitWithExitCode 0 +} +catch { + Write-Host $_.ScriptStackTrace + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_ + ExitWithExitCode 1 +} diff --git a/eng/common/sdl/packages.config b/eng/common/sdl/packages.config index 968b39bef..4585cfd6b 100644 --- a/eng/common/sdl/packages.config +++ b/eng/common/sdl/packages.config @@ -1,4 +1,4 @@ - + diff --git a/eng/common/sdl/push-gdn.ps1 b/eng/common/sdl/push-gdn.ps1 deleted file mode 100644 index 79c707d6d..000000000 --- a/eng/common/sdl/push-gdn.ps1 +++ /dev/null @@ -1,51 +0,0 @@ -Param( - [string] $Repository, - [string] $BranchName="master", - [string] $GdnFolder, - [string] $AzureDevOpsAccessToken, - [string] $PushReason -) - -$ErrorActionPreference = "Stop" -Set-StrictMode -Version 2.0 -$LASTEXITCODE = 0 - -# We create the temp directory where we'll store the sdl-config repository -$sdlDir = Join-Path $env:TEMP "sdl" -if (Test-Path $sdlDir) { - Remove-Item -Force -Recurse $sdlDir -} - -Write-Host "git clone https://dnceng:`$AzureDevOpsAccessToken@dev.azure.com/dnceng/internal/_git/sdl-tool-cfg $sdlDir" -git clone https://dnceng:$AzureDevOpsAccessToken@dev.azure.com/dnceng/internal/_git/sdl-tool-cfg $sdlDir -if ($LASTEXITCODE -ne 0) { - Write-Error "Git clone failed with exit code $LASTEXITCODE." -} -# We copy the .gdn folder from our local run into the git repository so it can be committed -$sdlRepositoryFolder = Join-Path (Join-Path (Join-Path $sdlDir $Repository) $BranchName) ".gdn" -if (Get-Command Robocopy) { - Robocopy /S $GdnFolder $sdlRepositoryFolder -} else { - rsync -r $GdnFolder $sdlRepositoryFolder -} -# cd to the sdl-config directory so we can run git there -Push-Location $sdlDir -# git add . --> git commit --> git push -Write-Host "git add ." -git add . -if ($LASTEXITCODE -ne 0) { - Write-Error "Git add failed with exit code $LASTEXITCODE." -} -Write-Host "git -c user.email=`"dn-bot@microsoft.com`" -c user.name=`"Dotnet Bot`" commit -m `"$PushReason for $Repository/$BranchName`"" -git -c user.email="dn-bot@microsoft.com" -c user.name="Dotnet Bot" commit -m "$PushReason for $Repository/$BranchName" -if ($LASTEXITCODE -ne 0) { - Write-Error "Git commit failed with exit code $LASTEXITCODE." -} -Write-Host "git push" -git push -if ($LASTEXITCODE -ne 0) { - Write-Error "Git push failed with exit code $LASTEXITCODE." 
-} - -# Return to the original directory -Pop-Location \ No newline at end of file diff --git a/eng/common/sdl/run-sdl.ps1 b/eng/common/sdl/run-sdl.ps1 index 9bc25314a..2eac8c78f 100644 --- a/eng/common/sdl/run-sdl.ps1 +++ b/eng/common/sdl/run-sdl.ps1 @@ -1,59 +1,49 @@ Param( [string] $GuardianCliLocation, [string] $WorkingDirectory, - [string] $TargetDirectory, [string] $GdnFolder, - [string[]] $ToolsList, [string] $UpdateBaseline, - [string] $GuardianLoggerLevel="Standard", - [string[]] $CrScanAdditionalRunConfigParams, - [string[]] $PoliCheckAdditionalRunConfigParams + [string] $GuardianLoggerLevel='Standard' ) -$ErrorActionPreference = "Stop" +$ErrorActionPreference = 'Stop' Set-StrictMode -Version 2.0 -$LASTEXITCODE = 0 +$disableConfigureToolsetImport = $true +$global:LASTEXITCODE = 0 -# We store config files in the r directory of .gdn -Write-Host $ToolsList -$gdnConfigPath = Join-Path $GdnFolder "r" -$ValidPath = Test-Path $GuardianCliLocation +try { + # `tools.ps1` checks $ci to perform some actions. Since the SDL + # scripts don't necessarily execute in the same agent that run the + # build.ps1/sh script this variable isn't automatically set. + $ci = $true + . $PSScriptRoot\..\tools.ps1 -if ($ValidPath -eq $False) -{ - Write-Host "Invalid Guardian CLI Location." - exit 1 -} - -$configParam = @("--config") + # We store config files in the r directory of .gdn + $gdnConfigPath = Join-Path $GdnFolder 'r' + $ValidPath = Test-Path $GuardianCliLocation -foreach ($tool in $ToolsList) { - $gdnConfigFile = Join-Path $gdnConfigPath "$tool-configure.gdnconfig" - Write-Host $tool - # We have to manually configure tools that run on source to look at the source directory only - if ($tool -eq "credscan") { - Write-Host "$GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args `" TargetDirectory < $TargetDirectory `" `" OutputType < pre `" $(If ($CrScanAdditionalRunConfigParams) {$CrScanAdditionalRunConfigParams})" - & $GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args " TargetDirectory < $TargetDirectory " "OutputType < pre" $(If ($CrScanAdditionalRunConfigParams) {$CrScanAdditionalRunConfigParams}) - if ($LASTEXITCODE -ne 0) { - Write-Host "Guardian configure for $tool failed with exit code $LASTEXITCODE." - exit $LASTEXITCODE - } - } - if ($tool -eq "policheck") { - Write-Host "$GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args `" Target < $TargetDirectory `" $(If ($PoliCheckAdditionalRunConfigParams) {$PoliCheckAdditionalRunConfigParams})" - & $GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args " Target < $TargetDirectory " $(If ($PoliCheckAdditionalRunConfigParams) {$PoliCheckAdditionalRunConfigParams}) - if ($LASTEXITCODE -ne 0) { - Write-Host "Guardian configure for $tool failed with exit code $LASTEXITCODE." - exit $LASTEXITCODE - } + if ($ValidPath -eq $False) + { + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Invalid Guardian CLI Location." 
+    ExitWithExitCode 1
   }
 
-  $configParam+=$gdnConfigFile
-}
+  $gdnConfigFiles = Get-ChildItem $gdnConfigPath -Recurse -Include '*.gdnconfig'
+  Write-Host "Discovered Guardian config files:"
+  $gdnConfigFiles | Out-String | Write-Host
 
-Write-Host "$GuardianCliLocation run --working-directory $WorkingDirectory --baseline mainbaseline --update-baseline $UpdateBaseline --logger-level $GuardianLoggerLevel $configParam"
-& $GuardianCliLocation run --working-directory $WorkingDirectory --tool $tool --baseline mainbaseline --update-baseline $UpdateBaseline --logger-level $GuardianLoggerLevel $configParam
-if ($LASTEXITCODE -ne 0) {
-  Write-Host "Guardian run for $ToolsList using $configParam failed with exit code $LASTEXITCODE."
-  exit $LASTEXITCODE
+  Exec-BlockVerbosely {
+    & $GuardianCliLocation run `
+      --working-directory $WorkingDirectory `
+      --baseline mainbaseline `
+      --update-baseline $UpdateBaseline `
+      --logger-level $GuardianLoggerLevel `
+      --config @gdnConfigFiles
+    Exit-IfNZEC "Sdl"
+  }
+}
+catch {
+  Write-Host $_.ScriptStackTrace
+  Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
+  ExitWithExitCode 1
}
diff --git a/eng/common/sdl/sdl.ps1 b/eng/common/sdl/sdl.ps1
new file mode 100644
index 000000000..7fe603fe9
--- /dev/null
+++ b/eng/common/sdl/sdl.ps1
@@ -0,0 +1,40 @@
+
+function Install-Gdn {
+  param(
+    [Parameter(Mandatory=$true)]
+    [string]$Path,
+
+    [string]$Source = "https://pkgs.dev.azure.com/dnceng/_packaging/Guardian1ESPTUpstreamOrgFeed/nuget/v3/index.json",
+
+    # If omitted, install the latest version of Guardian, otherwise install that specific version.
+    [string]$Version
+  )
+
+  $ErrorActionPreference = 'Stop'
+  Set-StrictMode -Version 2.0
+  $disableConfigureToolsetImport = $true
+  $global:LASTEXITCODE = 0
+
+  # `tools.ps1` checks $ci to perform some actions. Since the SDL
+  # scripts don't necessarily execute in the same agent that run the
+  # build.ps1/sh script this variable isn't automatically set.
+  $ci = $true
+  . $PSScriptRoot\..\tools.ps1
+
+  $argumentList = @("install", "Microsoft.Guardian.Cli.win-x64", "-Source $Source", "-OutputDirectory $Path", "-NonInteractive", "-NoCache")
+
+  if ($Version) {
+    $argumentList += "-Version $Version"
+  }
+
+  Start-Process nuget -Verbose -ArgumentList $argumentList -NoNewWindow -Wait
+
+  $gdnCliPath = Get-ChildItem -Filter guardian.cmd -Recurse -Path $Path
+
+  if (!$gdnCliPath)
+  {
+    Write-PipelineTelemetryError -Category 'Sdl' -Message 'Failure installing Guardian'
+  }
+
+  return $gdnCliPath.FullName
+}
\ No newline at end of file
diff --git a/eng/common/sdl/trim-assets-version.ps1 b/eng/common/sdl/trim-assets-version.ps1
new file mode 100644
index 000000000..a2e004877
--- /dev/null
+++ b/eng/common/sdl/trim-assets-version.ps1
@@ -0,0 +1,75 @@
+<#
+.SYNOPSIS
+Install and run the 'Microsoft.DotNet.VersionTools.Cli' tool with the 'trim-assets-version' command to trim the version from the NuGet assets file name.
+
+.PARAMETER InputPath
+Full path to directory where artifact packages are stored
+
+.PARAMETER Recursive
+Search for NuGet packages recursively
+
+#>
+
+Param(
+  [string] $InputPath,
+  [bool] $Recursive = $true
+)
+
+$CliToolName = "Microsoft.DotNet.VersionTools.Cli"
+
+function Install-VersionTools-Cli {
+  param(
+    [Parameter(Mandatory=$true)][string]$Version
+  )
+
+  Write-Host "Installing the package '$CliToolName' with version '$Version' ..."
+  $feed = "https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json"
+
+  $argumentList = @("tool", "install", "--local", "$CliToolName", "--add-source $feed", "--no-cache", "--version $Version", "--create-manifest-if-needed")
+  Start-Process "$dotnet" -Verbose -ArgumentList $argumentList -NoNewWindow -Wait
+}
+
+# -------------------------------------------------------------------
+
+if (!(Test-Path $InputPath)) {
+  Write-Host "Input Path '$InputPath' does not exist"
+  exit 1
+}
+
+$ErrorActionPreference = 'Stop'
+Set-StrictMode -Version 2.0
+
+$disableConfigureToolsetImport = $true
+$global:LASTEXITCODE = 0
+
+# `tools.ps1` checks $ci to perform some actions. Since the SDL
+# scripts don't necessarily execute in the same agent that run the
+# build.ps1/sh script this variable isn't automatically set.
+$ci = $true
+. $PSScriptRoot\..\tools.ps1
+
+try {
+  $dotnetRoot = InitializeDotNetCli -install:$true
+  $dotnet = "$dotnetRoot\dotnet.exe"
+
+  $toolsetVersion = Read-ArcadeSdkVersion
+  Install-VersionTools-Cli -Version $toolsetVersion
+
+  $cliToolFound = (& "$dotnet" tool list --local | Where-Object {$_.Split(' ')[0] -eq $CliToolName})
+  if ($null -eq $cliToolFound) {
+    Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "The '$CliToolName' tool is not installed."
+    ExitWithExitCode 1
+  }
+
+  Exec-BlockVerbosely {
+    & "$dotnet" $CliToolName trim-assets-version `
+      --assets-path $InputPath `
+      --recursive $Recursive
+    Exit-IfNZEC "Sdl"
+  }
+}
+catch {
+  Write-Host $_
+  Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
+  ExitWithExitCode 1
+}
\ No newline at end of file
diff --git a/eng/common/templates-official/job/job.yml b/eng/common/templates-official/job/job.yml
new file mode 100644
index 000000000..1f035fee7
--- /dev/null
+++ b/eng/common/templates-official/job/job.yml
@@ -0,0 +1,264 @@
+# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
+# and some (Microbuild) should only be applied to non-PR cases for internal builds.
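+#
+# For illustration, a minimal, hypothetical way a repo pipeline could consume this
+# template directly. The job name, pool values, and build command below are assumptions
+# for the sketch (borrowed from other templates in this patch), not defaults shipped by
+# this template; most repos go through jobs/jobs.yml instead of calling job.yml directly:
+#
+#   jobs:
+#   - template: /eng/common/templates-official/job/job.yml
+#     parameters:
+#       name: Windows_NT
+#       displayName: Build (Windows)
+#       pool:
+#         name: NetCore1ESPool-Internal
+#         image: 1es-windows-2022
+#         os: windows
+#       steps:
+#       - powershell: eng/common/build.ps1 -restore -build -ci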
+ +parameters: +# Job schema parameters - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job + cancelTimeoutInMinutes: '' + condition: '' + container: '' + continueOnError: false + dependsOn: '' + displayName: '' + pool: '' + steps: [] + strategy: '' + timeoutInMinutes: '' + variables: [] + workspace: '' + templateContext: '' + +# Job base template specific parameters + # See schema documentation - https://github.com/dotnet/arcade/blob/master/Documentation/AzureDevOps/TemplateSchema.md + artifacts: '' + enableMicrobuild: false + enablePublishBuildArtifacts: false + enablePublishBuildAssets: false + enablePublishTestResults: false + enablePublishUsingPipelines: false + enableBuildRetry: false + disableComponentGovernance: '' + componentGovernanceIgnoreDirectories: '' + mergeTestResults: false + testRunTitle: '' + testResultsFormat: '' + name: '' + preSteps: [] + runAsPublic: false +# Sbom related params + enableSbom: true + PackageVersion: 7.0.0 + BuildDropPath: '$(Build.SourcesDirectory)/artifacts' + +jobs: +- job: ${{ parameters.name }} + + ${{ if ne(parameters.cancelTimeoutInMinutes, '') }}: + cancelTimeoutInMinutes: ${{ parameters.cancelTimeoutInMinutes }} + + ${{ if ne(parameters.condition, '') }}: + condition: ${{ parameters.condition }} + + ${{ if ne(parameters.container, '') }}: + container: ${{ parameters.container }} + + ${{ if ne(parameters.continueOnError, '') }}: + continueOnError: ${{ parameters.continueOnError }} + + ${{ if ne(parameters.dependsOn, '') }}: + dependsOn: ${{ parameters.dependsOn }} + + ${{ if ne(parameters.displayName, '') }}: + displayName: ${{ parameters.displayName }} + + ${{ if ne(parameters.pool, '') }}: + pool: ${{ parameters.pool }} + + ${{ if ne(parameters.strategy, '') }}: + strategy: ${{ parameters.strategy }} + + ${{ if ne(parameters.timeoutInMinutes, '') }}: + timeoutInMinutes: ${{ parameters.timeoutInMinutes }} + + ${{ if ne(parameters.templateContext, '') }}: + templateContext: ${{ parameters.templateContext }} + + variables: + - ${{ if ne(parameters.enableTelemetry, 'false') }}: + - name: DOTNET_CLI_TELEMETRY_PROFILE + value: '$(Build.Repository.Uri)' + - ${{ if eq(parameters.enableRichCodeNavigation, 'true') }}: + - name: EnableRichCodeNavigation + value: 'true' + # Retry signature validation up to three times, waiting 2 seconds between attempts. + # See https://learn.microsoft.com/en-us/nuget/reference/errors-and-warnings/nu3028#retry-untrusted-root-failures + - name: NUGET_EXPERIMENTAL_CHAIN_BUILD_RETRY_POLICY + value: 3,2000 + - ${{ each variable in parameters.variables }}: + # handle name-value variable syntax + # example: + # - name: [key] + # value: [value] + - ${{ if ne(variable.name, '') }}: + - name: ${{ variable.name }} + value: ${{ variable.value }} + + # handle variable groups + - ${{ if ne(variable.group, '') }}: + - group: ${{ variable.group }} + + # handle template variable syntax + # example: + # - template: path/to/template.yml + # parameters: + # [key]: [value] + - ${{ if ne(variable.template, '') }}: + - template: ${{ variable.template }} + ${{ if ne(variable.parameters, '') }}: + parameters: ${{ variable.parameters }} + + # handle key-value variable syntax. 
+ # example: + # - [key]: [value] + - ${{ if and(eq(variable.name, ''), eq(variable.group, ''), eq(variable.template, '')) }}: + - ${{ each pair in variable }}: + - name: ${{ pair.key }} + value: ${{ pair.value }} + + # DotNet-HelixApi-Access provides 'HelixApiAccessToken' for internal builds + - ${{ if and(eq(parameters.enableTelemetry, 'true'), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - group: DotNet-HelixApi-Access + + ${{ if ne(parameters.workspace, '') }}: + workspace: ${{ parameters.workspace }} + + steps: + - ${{ if ne(parameters.preSteps, '') }}: + - ${{ each preStep in parameters.preSteps }}: + - ${{ preStep }} + + - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - ${{ if eq(parameters.enableMicrobuild, 'true') }}: + - task: MicroBuildSigningPlugin@4 + displayName: Install MicroBuild plugin + inputs: + signType: $(_SignType) + zipSources: false + feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json + env: + TeamName: $(_TeamName) + MicroBuildOutputFolderOverride: '$(Agent.TempDirectory)' + continueOnError: ${{ parameters.continueOnError }} + condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT')) + + - ${{ if and(eq(parameters.runAsPublic, 'false'), eq(variables['System.TeamProject'], 'internal')) }}: + - task: NuGetAuthenticate@1 + + - ${{ if and(ne(parameters.artifacts.download, 'false'), ne(parameters.artifacts.download, '')) }}: + - task: DownloadPipelineArtifact@2 + inputs: + buildType: current + artifactName: ${{ coalesce(parameters.artifacts.download.name, 'Artifacts_$(Agent.OS)_$(_BuildConfig)') }} + targetPath: ${{ coalesce(parameters.artifacts.download.path, 'artifacts') }} + itemPattern: ${{ coalesce(parameters.artifacts.download.pattern, '**') }} + + - ${{ each step in parameters.steps }}: + - ${{ step }} + + - ${{ if eq(parameters.enableRichCodeNavigation, true) }}: + - task: RichCodeNavIndexer@0 + displayName: RichCodeNav Upload + inputs: + languages: ${{ coalesce(parameters.richCodeNavigationLanguage, 'csharp') }} + environment: ${{ coalesce(parameters.richCodeNavigationEnvironment, 'production') }} + richNavLogOutputDirectory: $(Build.SourcesDirectory)/artifacts/bin + uploadRichNavArtifacts: ${{ coalesce(parameters.richCodeNavigationUploadArtifacts, false) }} + continueOnError: true + + - template: /eng/common/templates-official/steps/component-governance.yml + parameters: + ${{ if eq(parameters.disableComponentGovernance, '') }}: + ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.runAsPublic, 'false'), or(startsWith(variables['Build.SourceBranch'], 'refs/heads/release/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/dotnet/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/microsoft/'), eq(variables['Build.SourceBranch'], 'refs/heads/main'))) }}: + disableComponentGovernance: false + ${{ else }}: + disableComponentGovernance: true + ${{ else }}: + disableComponentGovernance: ${{ parameters.disableComponentGovernance }} + componentGovernanceIgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }} + + - ${{ if eq(parameters.enableMicrobuild, 'true') }}: + - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), 
notin(variables['Build.Reason'], 'PullRequest')) }}: + - task: MicroBuildCleanup@1 + displayName: Execute Microbuild cleanup tasks + condition: and(always(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT')) + continueOnError: ${{ parameters.continueOnError }} + env: + TeamName: $(_TeamName) + + - ${{ if ne(parameters.artifacts.publish, '') }}: + - ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}: + - task: CopyFiles@2 + displayName: Gather binaries for publish to artifacts + inputs: + SourceFolder: 'artifacts/bin' + Contents: '**' + TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/bin' + - task: CopyFiles@2 + displayName: Gather packages for publish to artifacts + inputs: + SourceFolder: 'artifacts/packages' + Contents: '**' + TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/packages' + - task: 1ES.PublishBuildArtifacts@1 + displayName: Publish pipeline artifacts + inputs: + PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts' + PublishLocation: Container + ArtifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }} + continueOnError: true + condition: always() + - ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}: + - task: 1ES.PublishPipelineArtifact@1 + inputs: + targetPath: 'artifacts/log' + artifactName: ${{ coalesce(parameters.artifacts.publish.logs.name, 'Logs_Build_$(Agent.Os)_$(_BuildConfig)') }} + displayName: 'Publish logs' + continueOnError: true + condition: always() + + - ${{ if ne(parameters.enablePublishBuildArtifacts, 'false') }}: + - task: 1ES.PublishBuildArtifacts@1 + displayName: Publish Logs + inputs: + PathtoPublish: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)' + PublishLocation: Container + ArtifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)' ) }} + continueOnError: true + condition: always() + + - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'xunit')) }}: + - task: PublishTestResults@2 + displayName: Publish XUnit Test Results + inputs: + testResultsFormat: 'xUnit' + testResultsFiles: '*.xml' + searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)' + testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-xunit + mergeTestResults: ${{ parameters.mergeTestResults }} + continueOnError: true + condition: always() + - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'vstest')) }}: + - task: PublishTestResults@2 + displayName: Publish TRX Test Results + inputs: + testResultsFormat: 'VSTest' + testResultsFiles: '*.trx' + searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)' + testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-trx + mergeTestResults: ${{ parameters.mergeTestResults }} + continueOnError: true + condition: always() + + - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.enableSbom, 'true')) }}: + - template: /eng/common/templates-official/steps/generate-sbom.yml + parameters: + PackageVersion: ${{ parameters.packageVersion}} + BuildDropPath: ${{ 
parameters.buildDropPath }} + IgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }} + + - ${{ if eq(parameters.enableBuildRetry, 'true') }}: + - task: 1ES.PublishPipelineArtifact@1 + inputs: + targetPath: '$(Build.SourcesDirectory)\eng\common\BuildConfiguration' + artifactName: 'BuildConfiguration' + displayName: 'Publish build retry configuration' + continueOnError: true \ No newline at end of file diff --git a/eng/common/templates-official/job/onelocbuild.yml b/eng/common/templates-official/job/onelocbuild.yml new file mode 100644 index 000000000..52b4d05d3 --- /dev/null +++ b/eng/common/templates-official/job/onelocbuild.yml @@ -0,0 +1,112 @@ +parameters: + # Optional: dependencies of the job + dependsOn: '' + + # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool + pool: '' + + CeapexPat: $(dn-bot-ceapex-package-r) # PAT for the loc AzDO instance https://dev.azure.com/ceapex + GithubPat: $(BotAccount-dotnet-bot-repo-PAT) + + SourcesDirectory: $(Build.SourcesDirectory) + CreatePr: true + AutoCompletePr: false + ReusePr: true + UseLfLineEndings: true + UseCheckedInLocProjectJson: false + SkipLocProjectJsonGeneration: false + LanguageSet: VS_Main_Languages + LclSource: lclFilesInRepo + LclPackageId: '' + RepoType: gitHub + GitHubOrg: dotnet + MirrorRepo: '' + MirrorBranch: main + condition: '' + JobNameSuffix: '' + +jobs: +- job: OneLocBuild${{ parameters.JobNameSuffix }} + + dependsOn: ${{ parameters.dependsOn }} + + displayName: OneLocBuild${{ parameters.JobNameSuffix }} + + variables: + - group: OneLocBuildVariables # Contains the CeapexPat and GithubPat + - name: _GenerateLocProjectArguments + value: -SourcesDirectory ${{ parameters.SourcesDirectory }} + -LanguageSet "${{ parameters.LanguageSet }}" + -CreateNeutralXlfs + - ${{ if eq(parameters.UseCheckedInLocProjectJson, 'true') }}: + - name: _GenerateLocProjectArguments + value: ${{ variables._GenerateLocProjectArguments }} -UseCheckedInLocProjectJson + - template: /eng/common/templates-official/variables/pool-providers.yml + + ${{ if ne(parameters.pool, '') }}: + pool: ${{ parameters.pool }} + ${{ if eq(parameters.pool, '') }}: + pool: + # We don't use the collection uri here because it might vary (.visualstudio.com vs. 
dev.azure.com) + ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: + name: AzurePipelines-EO + image: 1ESPT-Windows2022 + demands: Cmd + os: windows + # If it's not devdiv, it's dnceng + ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}: + name: $(DncEngInternalBuildPool) + image: 1es-windows-2022 + os: windows + + steps: + - ${{ if ne(parameters.SkipLocProjectJsonGeneration, 'true') }}: + - task: Powershell@2 + inputs: + filePath: $(Build.SourcesDirectory)/eng/common/generate-locproject.ps1 + arguments: $(_GenerateLocProjectArguments) + displayName: Generate LocProject.json + condition: ${{ parameters.condition }} + + - task: OneLocBuild@2 + displayName: OneLocBuild + env: + SYSTEM_ACCESSTOKEN: $(System.AccessToken) + inputs: + locProj: eng/Localize/LocProject.json + outDir: $(Build.ArtifactStagingDirectory) + lclSource: ${{ parameters.LclSource }} + lclPackageId: ${{ parameters.LclPackageId }} + isCreatePrSelected: ${{ parameters.CreatePr }} + isAutoCompletePrSelected: ${{ parameters.AutoCompletePr }} + ${{ if eq(parameters.CreatePr, true) }}: + isUseLfLineEndingsSelected: ${{ parameters.UseLfLineEndings }} + ${{ if eq(parameters.RepoType, 'gitHub') }}: + isShouldReusePrSelected: ${{ parameters.ReusePr }} + packageSourceAuth: patAuth + patVariable: ${{ parameters.CeapexPat }} + ${{ if eq(parameters.RepoType, 'gitHub') }}: + repoType: ${{ parameters.RepoType }} + gitHubPatVariable: "${{ parameters.GithubPat }}" + ${{ if ne(parameters.MirrorRepo, '') }}: + isMirrorRepoSelected: true + gitHubOrganization: ${{ parameters.GitHubOrg }} + mirrorRepo: ${{ parameters.MirrorRepo }} + mirrorBranch: ${{ parameters.MirrorBranch }} + condition: ${{ parameters.condition }} + + - task: 1ES.PublishBuildArtifacts@1 + displayName: Publish Localization Files + inputs: + PathtoPublish: '$(Build.ArtifactStagingDirectory)/loc' + PublishLocation: Container + ArtifactName: Loc + condition: ${{ parameters.condition }} + + - task: 1ES.PublishBuildArtifacts@1 + displayName: Publish LocProject.json + inputs: + PathtoPublish: '$(Build.SourcesDirectory)/eng/Localize/' + PublishLocation: Container + ArtifactName: Loc + condition: ${{ parameters.condition }} \ No newline at end of file diff --git a/eng/common/templates-official/job/publish-build-assets.yml b/eng/common/templates-official/job/publish-build-assets.yml new file mode 100644 index 000000000..011732880 --- /dev/null +++ b/eng/common/templates-official/job/publish-build-assets.yml @@ -0,0 +1,160 @@ +parameters: + configuration: 'Debug' + + # Optional: condition for the job to run + condition: '' + + # Optional: 'true' if future jobs should run even if this job fails + continueOnError: false + + # Optional: dependencies of the job + dependsOn: '' + + # Optional: Include PublishBuildArtifacts task + enablePublishBuildArtifacts: false + + # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool + pool: {} + + # Optional: should run as a public build even in the internal project + # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects. 
+  runAsPublic: false
+
+  # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing
+  publishUsingPipelines: false
+
+  # Optional: whether to publish the assets as soon as the publish to BAR stage is complete, rather than doing so in a separate stage
+  publishAssetsImmediately: false
+
+  artifactsPublishingAdditionalParameters: ''
+
+  signingValidationAdditionalParameters: ''
+
+jobs:
+- job: Asset_Registry_Publish
+
+  dependsOn: ${{ parameters.dependsOn }}
+  timeoutInMinutes: 150
+
+  ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
+    displayName: Publish Assets
+  ${{ else }}:
+    displayName: Publish to Build Asset Registry
+
+  variables:
+  - template: /eng/common/templates-official/variables/pool-providers.yml
+  - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+    - group: Publish-Build-Assets
+    - group: AzureDevOps-Artifact-Feeds-Pats
+    - name: runCodesignValidationInjection
+      value: false
+  - ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
+    - template: /eng/common/templates-official/post-build/common-variables.yml
+
+  pool:
+    # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
+    ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
+      name: AzurePipelines-EO
+      image: 1ESPT-Windows2022
+      demands: Cmd
+      os: windows
+    # If it's not devdiv, it's dnceng
+    ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
+      name: NetCore1ESPool-Publishing-Internal
+      image: windows.vs2019.amd64
+      os: windows
+  steps:
+  - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+    - task: DownloadBuildArtifacts@0
+      displayName: Download artifact
+      inputs:
+        artifactName: AssetManifests
+        downloadPath: '$(Build.StagingDirectory)/Download'
+        checkDownloadedFiles: true
+      condition: ${{ parameters.condition }}
+      continueOnError: ${{ parameters.continueOnError }}
+
+    - task: NuGetAuthenticate@1
+
+    - task: AzureCLI@2
+      displayName: Publish Build Assets
+      inputs:
+        azureSubscription: "Darc: Maestro Production"
+        scriptType: ps
+        scriptLocation: scriptPath
+        scriptPath: $(Build.SourcesDirectory)/eng/common/sdk-task.ps1
+        arguments: >
+          -task PublishBuildAssets -restore -msbuildEngine dotnet
+          /p:ManifestsPath='$(Build.StagingDirectory)/Download/AssetManifests'
+          /p:MaestroApiEndpoint=https://maestro-prod.westus2.cloudapp.azure.com
+          /p:PublishUsingPipelines=${{ parameters.publishUsingPipelines }}
+          /p:OfficialBuildId=$(Build.BuildNumber)
+      condition: ${{ parameters.condition }}
+      continueOnError: ${{ parameters.continueOnError }}
+
+    - task: powershell@2
+      displayName: Create ReleaseConfigs Artifact
+      inputs:
+        targetType: inline
+        script: |
+          New-Item -Path "$(Build.StagingDirectory)/ReleaseConfigs" -ItemType Directory -Force
+          $filePath = "$(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt"
+          Add-Content -Path $filePath -Value $(BARBuildId)
+          Add-Content -Path $filePath -Value "$(DefaultChannels)"
+          Add-Content -Path $filePath -Value $(IsStableBuild)
+
+    - task: 1ES.PublishBuildArtifacts@1
+      displayName: Publish ReleaseConfigs Artifact
+      inputs:
+        PathtoPublish: '$(Build.StagingDirectory)/ReleaseConfigs'
+        PublishLocation: Container
+        ArtifactName: ReleaseConfigs
+
+    - task: powershell@2
+      displayName: Check if SymbolPublishingExclusionsFile.txt exists
+      inputs:
+        targetType: inline
+        script: |
+          $symbolExclusionfile 
= "$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt" + if(Test-Path -Path $symbolExclusionfile) + { + Write-Host "SymbolExclusionFile exists" + Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]true" + } + else{ + Write-Host "Symbols Exclusion file does not exists" + Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]false" + } + + - task: 1ES.PublishBuildArtifacts@1 + displayName: Publish SymbolPublishingExclusionsFile Artifact + condition: eq(variables['SymbolExclusionFile'], 'true') + inputs: + PathtoPublish: '$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt' + PublishLocation: Container + ArtifactName: ReleaseConfigs + + - ${{ if eq(parameters.publishAssetsImmediately, 'true') }}: + - template: /eng/common/templates-official/post-build/setup-maestro-vars.yml + parameters: + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} + + - task: AzureCLI@2 + displayName: Publish Using Darc + inputs: + azureSubscription: "Darc: Maestro Production" + scriptType: ps + scriptLocation: scriptPath + scriptPath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1 + arguments: -BuildId $(BARBuildId) + -PublishingInfraVersion 3 + -AzdoToken '$(System.AccessToken)' + -WaitPublishingFinish true + -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}' + -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}' + + - ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}: + - template: /eng/common/templates-official/steps/publish-logs.yml + parameters: + JobLabel: 'Publish_Artifacts_Logs' diff --git a/eng/common/templates-official/job/source-build.yml b/eng/common/templates-official/job/source-build.yml new file mode 100644 index 000000000..f983033bb --- /dev/null +++ b/eng/common/templates-official/job/source-build.yml @@ -0,0 +1,75 @@ +parameters: + # This template adds arcade-powered source-build to CI. The template produces a server job with a + # default ID 'Source_Build_Complete' to put in a dependency list if necessary. + + # Specifies the prefix for source-build jobs added to pipeline. Use this if disambiguation needed. + jobNamePrefix: 'Source_Build' + + # Defines the platform on which to run the job. By default, a linux-x64 machine, suitable for + # managed-only repositories. This is an object with these properties: + # + # name: '' + # The name of the job. This is included in the job ID. + # targetRID: '' + # The name of the target RID to use, instead of the one auto-detected by Arcade. + # nonPortable: false + # Enables non-portable mode. This means a more specific RID (e.g. fedora.32-x64 rather than + # linux-x64), and compiling against distro-provided packages rather than portable ones. + # skipPublishValidation: false + # Disables publishing validation. By default, a check is performed to ensure no packages are + # published by source-build. + # container: '' + # A container to use. Runs in docker. + # pool: {} + # A pool to use. Runs directly on an agent. + # buildScript: '' + # Specifies the build script to invoke to perform the build in the repo. The default + # './build.sh' should work for typical Arcade repositories, but this is customizable for + # difficult situations. + # jobProperties: {} + # A list of job properties to inject at the top level, for potential extensibility beyond + # container and pool. 
+ platform: {} + + # If set to true and running on a non-public project, + # Internal blob storage locations will be enabled. + # This is not enabled by default because many repositories do not need internal sources + # and do not need to have the required service connections approved in the pipeline. + enableInternalSources: false + +jobs: +- job: ${{ parameters.jobNamePrefix }}_${{ parameters.platform.name }} + displayName: Source-Build (${{ parameters.platform.name }}) + + ${{ each property in parameters.platform.jobProperties }}: + ${{ property.key }}: ${{ property.value }} + + ${{ if ne(parameters.platform.container, '') }}: + container: ${{ parameters.platform.container }} + + ${{ if eq(parameters.platform.pool, '') }}: + # The default VM host AzDO pool. This should be capable of running Docker containers: almost all + # source-build builds run in Docker, including the default managed platform. + # /eng/common/templates-official/variables/pool-providers.yml can't be used here (some customers declare variables already), so duplicate its logic + pool: + ${{ if eq(variables['System.TeamProject'], 'public') }}: + name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore-Svc-Public' ), False, 'NetCore-Public')] + demands: ImageOverride -equals Build.Ubuntu.1804.Amd64.Open + + ${{ if eq(variables['System.TeamProject'], 'internal') }}: + name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore1ESPool-Svc-Internal'), False, 'NetCore1ESPool-Internal')] + image: 1es-mariner-2 + os: linux + + ${{ if ne(parameters.platform.pool, '') }}: + pool: ${{ parameters.platform.pool }} + + workspace: + clean: all + + steps: + - ${{ if eq(parameters.enableInternalSources, true) }}: + - template: /eng/common/templates-official/steps/enable-internal-runtimes.yml + - template: /eng/common/templates-official/steps/source-build.yml + parameters: + platform: ${{ parameters.platform }} diff --git a/eng/common/templates-official/job/source-index-stage1.yml b/eng/common/templates-official/job/source-index-stage1.yml new file mode 100644 index 000000000..60dfb6b2d --- /dev/null +++ b/eng/common/templates-official/job/source-index-stage1.yml @@ -0,0 +1,83 @@ +parameters: + runAsPublic: false + sourceIndexUploadPackageVersion: 2.0.0-20240502.12 + sourceIndexProcessBinlogPackageVersion: 1.0.1-20240129.2 + sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json + sourceIndexBuildCommand: powershell -NoLogo -NoProfile -ExecutionPolicy Bypass -Command "eng/common/build.ps1 -restore -build -binarylog -ci" + preSteps: [] + binlogPath: artifacts/log/Debug/Build.binlog + condition: '' + dependsOn: '' + pool: '' + +jobs: +- job: SourceIndexStage1 + dependsOn: ${{ parameters.dependsOn }} + condition: ${{ parameters.condition }} + variables: + - name: SourceIndexUploadPackageVersion + value: ${{ parameters.sourceIndexUploadPackageVersion }} + - name: SourceIndexProcessBinlogPackageVersion + value: ${{ parameters.sourceIndexProcessBinlogPackageVersion }} + - name: SourceIndexPackageSource + value: ${{ parameters.sourceIndexPackageSource }} + - name: BinlogPath + value: ${{ parameters.binlogPath }} + - template: /eng/common/templates-official/variables/pool-providers.yml + + ${{ if ne(parameters.pool, '') }}: + pool: ${{ parameters.pool 
}} + ${{ if eq(parameters.pool, '') }}: + pool: + ${{ if eq(variables['System.TeamProject'], 'public') }}: + name: $(DncEngPublicBuildPool) + demands: ImageOverride -equals windows.vs2019.amd64.open + ${{ if eq(variables['System.TeamProject'], 'internal') }}: + name: $(DncEngInternalBuildPool) + image: windows.vs2022.amd64 + os: windows + + steps: + - ${{ each preStep in parameters.preSteps }}: + - ${{ preStep }} + + - task: UseDotNet@2 + displayName: Use .NET 8 SDK + inputs: + packageType: sdk + version: 8.0.x + installationPath: $(Agent.TempDirectory)/dotnet + workingDirectory: $(Agent.TempDirectory) + + - script: | + $(Agent.TempDirectory)/dotnet/dotnet tool install BinLogToSln --version $(sourceIndexProcessBinlogPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools + $(Agent.TempDirectory)/dotnet/dotnet tool install UploadIndexStage1 --version $(sourceIndexUploadPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools + displayName: Download Tools + # Set working directory to temp directory so 'dotnet' doesn't try to use global.json and use the repo's sdk. + workingDirectory: $(Agent.TempDirectory) + + - script: ${{ parameters.sourceIndexBuildCommand }} + displayName: Build Repository + + - script: $(Agent.TempDirectory)/.source-index/tools/BinLogToSln -i $(BinlogPath) -r $(Build.SourcesDirectory) -n $(Build.Repository.Name) -o .source-index/stage1output + displayName: Process Binlog into indexable sln + + - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - task: AzureCLI@2 + displayName: Get stage 1 auth token + inputs: + azureSubscription: 'SourceDotNet Stage1 Publish' + addSpnToEnvironment: true + scriptType: 'ps' + scriptLocation: 'inlineScript' + inlineScript: | + echo "##vso[task.setvariable variable=ARM_CLIENT_ID;issecret=true]$env:servicePrincipalId" + echo "##vso[task.setvariable variable=ARM_ID_TOKEN;issecret=true]$env:idToken" + echo "##vso[task.setvariable variable=ARM_TENANT_ID;issecret=true]$env:tenantId" + + - script: | + az login --service-principal -u $(ARM_CLIENT_ID) --tenant $(ARM_TENANT_ID) --allow-no-subscriptions --federated-token $(ARM_ID_TOKEN) + displayName: "Login to Azure" + + - script: $(Agent.TempDirectory)/.source-index/tools/UploadIndexStage1 -i .source-index/stage1output -n $(Build.Repository.Name) -s netsourceindexstage1 -b stage1 + displayName: Upload stage1 artifacts to source index diff --git a/eng/common/templates-official/jobs/codeql-build.yml b/eng/common/templates-official/jobs/codeql-build.yml new file mode 100644 index 000000000..b68d3c2f3 --- /dev/null +++ b/eng/common/templates-official/jobs/codeql-build.yml @@ -0,0 +1,31 @@ +parameters: + # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md + continueOnError: false + # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job + jobs: [] + # Optional: if specified, restore and use this version of Guardian instead of the default. 
+  overrideGuardianVersion: ''
+
+jobs:
+- template: /eng/common/templates-official/jobs/jobs.yml
+  parameters:
+    enableMicrobuild: false
+    enablePublishBuildArtifacts: false
+    enablePublishTestResults: false
+    enablePublishBuildAssets: false
+    enablePublishUsingPipelines: false
+    enableTelemetry: true
+
+    variables:
+      - group: Publish-Build-Assets
+      # The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in
+      # sync with the packages.config file.
+      - name: DefaultGuardianVersion
+        value: 0.109.0
+      - name: GuardianPackagesConfigFile
+        value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config
+      - name: GuardianVersion
+        value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }}
+
+    jobs: ${{ parameters.jobs }}
+
diff --git a/eng/common/templates-official/jobs/jobs.yml b/eng/common/templates-official/jobs/jobs.yml
new file mode 100644
index 000000000..857a0f8ba
--- /dev/null
+++ b/eng/common/templates-official/jobs/jobs.yml
@@ -0,0 +1,97 @@
+parameters:
+  # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md
+  continueOnError: false
+
+  # Optional: Include PublishBuildArtifacts task
+  enablePublishBuildArtifacts: false
+
+  # Optional: Enable publishing using release pipelines
+  enablePublishUsingPipelines: false
+
+  # Optional: Enable running the source-build jobs to build repo from source
+  enableSourceBuild: false
+
+  # Optional: Parameters for source-build template.
+  #           See /eng/common/templates-official/jobs/source-build.yml for options
+  sourceBuildParameters: []
+
+  graphFileGeneration:
+    # Optional: Enable generating the graph files at the end of the build
+    enabled: false
+    # Optional: Include toolset dependencies in the generated graph files
+    includeToolset: false
+
+  # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
+  jobs: []
+
+  # Optional: Override automatically derived dependsOn value for "publish build assets" job
+  publishBuildAssetsDependsOn: ''
+
+  # Optional: Publish the assets as soon as the publish to BAR stage is complete, rather than doing so in a separate stage.
+  publishAssetsImmediately: false
+
+  # Optional: If using publishAssetsImmediately and additional parameters are needed, can be used to send along additional parameters (normally sent to post-build.yml)
+  artifactsPublishingAdditionalParameters: ''
+  signingValidationAdditionalParameters: ''
+
+  # Optional: should run as a public build even in the internal project
+  # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
+  runAsPublic: false
+
+  enableSourceIndex: false
+  sourceIndexParams: {}
+
+# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
+# and some (Microbuild) should only be applied to non-PR cases for internal builds.
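+#
+# For illustration, a minimal, hypothetical repo-side pipeline consuming this template.
+# The parameter values, job name, pool, and build script below are assumptions for the
+# sketch (pool and './build.sh' values are borrowed from other templates in this patch),
+# not defaults of this template:
+#
+#   jobs:
+#   - template: /eng/common/templates-official/jobs/jobs.yml
+#     parameters:
+#       enablePublishBuildArtifacts: true
+#       jobs:
+#       - job: Linux_Build
+#         pool:
+#           name: NetCore1ESPool-Internal
+#           image: 1es-mariner-2
+#           os: linux
+#         steps:
+#         - script: ./build.sh --restore --build --ci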
+ +jobs: +- ${{ each job in parameters.jobs }}: + - template: ../job/job.yml + parameters: + # pass along parameters + ${{ each parameter in parameters }}: + ${{ if ne(parameter.key, 'jobs') }}: + ${{ parameter.key }}: ${{ parameter.value }} + + # pass along job properties + ${{ each property in job }}: + ${{ if ne(property.key, 'job') }}: + ${{ property.key }}: ${{ property.value }} + + name: ${{ job.job }} + +- ${{ if eq(parameters.enableSourceBuild, true) }}: + - template: /eng/common/templates-official/jobs/source-build.yml + parameters: + allCompletedJobId: Source_Build_Complete + ${{ each parameter in parameters.sourceBuildParameters }}: + ${{ parameter.key }}: ${{ parameter.value }} + +- ${{ if eq(parameters.enableSourceIndex, 'true') }}: + - template: ../job/source-index-stage1.yml + parameters: + runAsPublic: ${{ parameters.runAsPublic }} + ${{ each parameter in parameters.sourceIndexParams }}: + ${{ parameter.key }}: ${{ parameter.value }} + +- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - ${{ if or(eq(parameters.enablePublishBuildAssets, true), eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, '')) }}: + - template: ../job/publish-build-assets.yml + parameters: + continueOnError: ${{ parameters.continueOnError }} + dependsOn: + - ${{ if ne(parameters.publishBuildAssetsDependsOn, '') }}: + - ${{ each job in parameters.publishBuildAssetsDependsOn }}: + - ${{ job.job }} + - ${{ if eq(parameters.publishBuildAssetsDependsOn, '') }}: + - ${{ each job in parameters.jobs }}: + - ${{ job.job }} + - ${{ if eq(parameters.enableSourceBuild, true) }}: + - Source_Build_Complete + + runAsPublic: ${{ parameters.runAsPublic }} + publishUsingPipelines: ${{ parameters.enablePublishUsingPipelines }} + publishAssetsImmediately: ${{ parameters.publishAssetsImmediately }} + enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }} + artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} + signingValidationAdditionalParameters: ${{ parameters.signingValidationAdditionalParameters }} diff --git a/eng/common/templates-official/jobs/source-build.yml b/eng/common/templates-official/jobs/source-build.yml new file mode 100644 index 000000000..5cf6a269c --- /dev/null +++ b/eng/common/templates-official/jobs/source-build.yml @@ -0,0 +1,54 @@ +parameters: + # This template adds arcade-powered source-build to CI. A job is created for each platform, as + # well as an optional server job that completes when all platform jobs complete. + + # The name of the "join" job for all source-build platforms. If set to empty string, the job is + # not included. Existing repo pipelines can use this job depend on all source-build jobs + # completing without maintaining a separate list of every single job ID: just depend on this one + # server job. By default, not included. Recommended name if used: 'Source_Build_Complete'. + allCompletedJobId: '' + + # See /eng/common/templates-official/job/source-build.yml + jobNamePrefix: 'Source_Build' + + # This is the default platform provided by Arcade, intended for use by a managed-only repo. + defaultManagedPlatform: + name: 'Managed' + container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8' + + # Defines the platforms on which to run build jobs. One job is created for each platform, and the + # object in this array is sent to the job template as 'platform'. 
If no platforms are specified, + # one job runs on 'defaultManagedPlatform'. + platforms: [] + + # If set to true and running on a non-public project, + # Internal nuget and blob storage locations will be enabled. + # This is not enabled by default because many repositories do not need internal sources + # and do not need to have the required service connections approved in the pipeline. + enableInternalSources: false + +jobs: + +- ${{ if ne(parameters.allCompletedJobId, '') }}: + - job: ${{ parameters.allCompletedJobId }} + displayName: Source-Build Complete + pool: server + dependsOn: + - ${{ each platform in parameters.platforms }}: + - ${{ parameters.jobNamePrefix }}_${{ platform.name }} + - ${{ if eq(length(parameters.platforms), 0) }}: + - ${{ parameters.jobNamePrefix }}_${{ parameters.defaultManagedPlatform.name }} + +- ${{ each platform in parameters.platforms }}: + - template: /eng/common/templates-official/job/source-build.yml + parameters: + jobNamePrefix: ${{ parameters.jobNamePrefix }} + platform: ${{ platform }} + enableInternalSources: ${{ parameters.enableInternalSources }} + +- ${{ if eq(length(parameters.platforms), 0) }}: + - template: /eng/common/templates-official/job/source-build.yml + parameters: + jobNamePrefix: ${{ parameters.jobNamePrefix }} + platform: ${{ parameters.defaultManagedPlatform }} + enableInternalSources: ${{ parameters.enableInternalSources }} diff --git a/eng/common/templates-official/post-build/common-variables.yml b/eng/common/templates-official/post-build/common-variables.yml new file mode 100644 index 000000000..c24193acf --- /dev/null +++ b/eng/common/templates-official/post-build/common-variables.yml @@ -0,0 +1,22 @@ +variables: + - group: Publish-Build-Assets + + # Whether the build is internal or not + - name: IsInternalBuild + value: ${{ and(ne(variables['System.TeamProject'], 'public'), contains(variables['Build.SourceBranch'], 'internal')) }} + + # Default Maestro++ API Endpoint and API Version + - name: MaestroApiEndPoint + value: "https://maestro-prod.westus2.cloudapp.azure.com" + - name: MaestroApiAccessToken + value: $(MaestroAccessToken) + - name: MaestroApiVersion + value: "2020-02-20" + + - name: SourceLinkCLIVersion + value: 3.0.0 + - name: SymbolToolVersion + value: 1.0.1 + + - name: runCodesignValidationInjection + value: false diff --git a/eng/common/templates-official/post-build/post-build.yml b/eng/common/templates-official/post-build/post-build.yml new file mode 100644 index 000000000..b81b8770b --- /dev/null +++ b/eng/common/templates-official/post-build/post-build.yml @@ -0,0 +1,287 @@ +parameters: + # Which publishing infra should be used. THIS SHOULD MATCH THE VERSION ON THE BUILD MANIFEST. + # Publishing V1 is no longer supported + # Publishing V2 is no longer supported + # Publishing V3 is the default + - name: publishingInfraVersion + displayName: Which version of publishing should be used to promote the build definition? 
+ type: number + default: 3 + values: + - 3 + + - name: BARBuildId + displayName: BAR Build Id + type: number + default: 0 + + - name: PromoteToChannelIds + displayName: Channel to promote BARBuildId to + type: string + default: '' + + - name: enableSourceLinkValidation + displayName: Enable SourceLink validation + type: boolean + default: false + + - name: enableSigningValidation + displayName: Enable signing validation + type: boolean + default: true + + - name: enableSymbolValidation + displayName: Enable symbol validation + type: boolean + default: false + + - name: enableNugetValidation + displayName: Enable NuGet validation + type: boolean + default: true + + - name: publishInstallersAndChecksums + displayName: Publish installers and checksums + type: boolean + default: true + + - name: SDLValidationParameters + type: object + default: + enable: false + publishGdn: false + continueOnError: false + params: '' + artifactNames: '' + downloadArtifacts: true + + # These parameters let the user customize the call to sdk-task.ps1 for publishing + # symbols & general artifacts as well as for signing validation + - name: symbolPublishingAdditionalParameters + displayName: Symbol publishing additional parameters + type: string + default: '' + + - name: artifactsPublishingAdditionalParameters + displayName: Artifact publishing additional parameters + type: string + default: '' + + - name: signingValidationAdditionalParameters + displayName: Signing validation additional parameters + type: string + default: '' + + # Which stages should finish execution before post-build stages start + - name: validateDependsOn + type: object + default: + - build + + - name: publishDependsOn + type: object + default: + - Validate + + # Optional: Call asset publishing rather than running in a separate stage + - name: publishAssetsImmediately + type: boolean + default: false + +stages: +- ${{ if or(eq( parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}: + - stage: Validate + dependsOn: ${{ parameters.validateDependsOn }} + displayName: Validate Build Assets + variables: + - template: common-variables.yml + - template: /eng/common/templates-official/variables/pool-providers.yml + jobs: + - job: + displayName: NuGet Validation + condition: and(succeededOrFailed(), eq( ${{ parameters.enableNugetValidation }}, 'true')) + pool: + # We don't use the collection uri here because it might vary (.visualstudio.com vs. 
dev.azure.com) + ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: + name: AzurePipelines-EO + image: 1ESPT-Windows2022 + demands: Cmd + os: windows + # If it's not devdiv, it's dnceng + ${{ else }}: + name: $(DncEngInternalBuildPool) + image: 1es-windows-2022 + os: windows + + steps: + - template: setup-maestro-vars.yml + parameters: + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} + + - task: DownloadBuildArtifacts@0 + displayName: Download Package Artifacts + inputs: + buildType: specific + buildVersionToDownload: specific + project: $(AzDOProjectName) + pipeline: $(AzDOPipelineId) + buildId: $(AzDOBuildId) + artifactName: PackageArtifacts + checkDownloadedFiles: true + + - task: PowerShell@2 + displayName: Validate + inputs: + filePath: $(Build.SourcesDirectory)/eng/common/post-build/nuget-validation.ps1 + arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/ + -ToolDestinationPath $(Agent.BuildDirectory)/Extract/ + + - job: + displayName: Signing Validation + condition: and( eq( ${{ parameters.enableSigningValidation }}, 'true'), ne( variables['PostBuildSign'], 'true')) + pool: + # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com) + ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: + name: AzurePipelines-EO + image: 1ESPT-Windows2022 + demands: Cmd + os: windows + # If it's not devdiv, it's dnceng + ${{ else }}: + name: $(DncEngInternalBuildPool) + image: 1es-windows-2022 + os: windows + steps: + - template: setup-maestro-vars.yml + parameters: + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} + + - task: DownloadBuildArtifacts@0 + displayName: Download Package Artifacts + inputs: + buildType: specific + buildVersionToDownload: specific + project: $(AzDOProjectName) + pipeline: $(AzDOPipelineId) + buildId: $(AzDOBuildId) + artifactName: PackageArtifacts + checkDownloadedFiles: true + itemPattern: | + ** + !**/Microsoft.SourceBuild.Intermediate.*.nupkg + + # This is necessary whenever we want to publish/restore to an AzDO private feed + # Since sdk-task.ps1 tries to restore packages we need to do this authentication here + # otherwise it'll complain about accessing a private feed. + - task: NuGetAuthenticate@1 + displayName: 'Authenticate to AzDO Feeds' + + # Signing validation will optionally work with the buildmanifest file which is downloaded from + # Azure DevOps above. + - task: PowerShell@2 + displayName: Validate + inputs: + filePath: eng\common\sdk-task.ps1 + arguments: -task SigningValidation -restore -msbuildEngine vs + /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts' + /p:SignCheckExclusionsFile='$(Build.SourcesDirectory)/eng/SignCheckExclusionsFile.txt' + ${{ parameters.signingValidationAdditionalParameters }} + + - template: ../steps/publish-logs.yml + parameters: + StageLabel: 'Validation' + JobLabel: 'Signing' + BinlogToolVersion: $(BinlogToolVersion) + + - job: + displayName: SourceLink Validation + condition: eq( ${{ parameters.enableSourceLinkValidation }}, 'true') + pool: + # We don't use the collection uri here because it might vary (.visualstudio.com vs. 
dev.azure.com) + ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: + name: AzurePipelines-EO + image: 1ESPT-Windows2022 + demands: Cmd + os: windows + # If it's not devdiv, it's dnceng + ${{ else }}: + name: $(DncEngInternalBuildPool) + image: 1es-windows-2022 + os: windows + steps: + - template: setup-maestro-vars.yml + parameters: + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} + + - task: DownloadBuildArtifacts@0 + displayName: Download Blob Artifacts + inputs: + buildType: specific + buildVersionToDownload: specific + project: $(AzDOProjectName) + pipeline: $(AzDOPipelineId) + buildId: $(AzDOBuildId) + artifactName: BlobArtifacts + checkDownloadedFiles: true + + - task: PowerShell@2 + displayName: Validate + inputs: + filePath: $(Build.SourcesDirectory)/eng/common/post-build/sourcelink-validation.ps1 + arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/ + -ExtractPath $(Agent.BuildDirectory)/Extract/ + -GHRepoName $(Build.Repository.Name) + -GHCommit $(Build.SourceVersion) + -SourcelinkCliVersion $(SourceLinkCLIVersion) + continueOnError: true + +- ${{ if ne(parameters.publishAssetsImmediately, 'true') }}: + - stage: publish_using_darc + ${{ if or(eq(parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}: + dependsOn: ${{ parameters.publishDependsOn }} + ${{ else }}: + dependsOn: ${{ parameters.validateDependsOn }} + displayName: Publish using Darc + variables: + - template: common-variables.yml + - template: /eng/common/templates-official/variables/pool-providers.yml + jobs: + - job: + displayName: Publish Using Darc + timeoutInMinutes: 120 + pool: + # We don't use the collection uri here because it might vary (.visualstudio.com vs. 
dev.azure.com) + ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: + name: AzurePipelines-EO + image: 1ESPT-Windows2022 + demands: Cmd + os: windows + # If it's not devdiv, it's dnceng + ${{ else }}: + name: NetCore1ESPool-Publishing-Internal + image: windows.vs2019.amd64 + os: windows + steps: + - template: setup-maestro-vars.yml + parameters: + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} + + - task: NuGetAuthenticate@1 + + - task: AzureCLI@2 + displayName: Publish Using Darc + inputs: + azureSubscription: "Darc: Maestro Production" + scriptType: ps + scriptLocation: scriptPath + scriptPath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1 + arguments: -BuildId $(BARBuildId) + -PublishingInfraVersion ${{ parameters.publishingInfraVersion }} + -AzdoToken '$(System.AccessToken)' + -WaitPublishingFinish true + -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}' + -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}' diff --git a/eng/common/templates-official/post-build/setup-maestro-vars.yml b/eng/common/templates-official/post-build/setup-maestro-vars.yml new file mode 100644 index 000000000..0c87f149a --- /dev/null +++ b/eng/common/templates-official/post-build/setup-maestro-vars.yml @@ -0,0 +1,70 @@ +parameters: + BARBuildId: '' + PromoteToChannelIds: '' + +steps: + - ${{ if eq(coalesce(parameters.PromoteToChannelIds, 0), 0) }}: + - task: DownloadBuildArtifacts@0 + displayName: Download Release Configs + inputs: + buildType: current + artifactName: ReleaseConfigs + checkDownloadedFiles: true + + - task: PowerShell@2 + name: setReleaseVars + displayName: Set Release Configs Vars + inputs: + targetType: inline + pwsh: true + script: | + try { + if (!$Env:PromoteToMaestroChannels -or $Env:PromoteToMaestroChannels.Trim() -eq '') { + $Content = Get-Content $(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt + + $BarId = $Content | Select -Index 0 + $Channels = $Content | Select -Index 1 + $IsStableBuild = $Content | Select -Index 2 + + $AzureDevOpsProject = $Env:System_TeamProject + $AzureDevOpsBuildDefinitionId = $Env:System_DefinitionId + $AzureDevOpsBuildId = $Env:Build_BuildId + } + else { + $buildApiEndpoint = "${Env:MaestroApiEndPoint}/api/builds/${Env:BARBuildId}?api-version=${Env:MaestroApiVersion}" + + $apiHeaders = New-Object 'System.Collections.Generic.Dictionary[[String],[String]]' + $apiHeaders.Add('Accept', 'application/json') + $apiHeaders.Add('Authorization',"Bearer ${Env:MAESTRO_API_TOKEN}") + + $buildInfo = try { Invoke-WebRequest -Method Get -Uri $buildApiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" } + + $BarId = $Env:BARBuildId + $Channels = $Env:PromoteToMaestroChannels -split "," + $Channels = $Channels -join "][" + $Channels = "[$Channels]" + + $IsStableBuild = $buildInfo.stable + $AzureDevOpsProject = $buildInfo.azureDevOpsProject + $AzureDevOpsBuildDefinitionId = $buildInfo.azureDevOpsBuildDefinitionId + $AzureDevOpsBuildId = $buildInfo.azureDevOpsBuildId + } + + Write-Host "##vso[task.setvariable variable=BARBuildId]$BarId" + Write-Host "##vso[task.setvariable variable=TargetChannels]$Channels" + Write-Host "##vso[task.setvariable variable=IsStableBuild]$IsStableBuild" + + Write-Host "##vso[task.setvariable variable=AzDOProjectName]$AzureDevOpsProject" + Write-Host "##vso[task.setvariable variable=AzDOPipelineId]$AzureDevOpsBuildDefinitionId" + 
Write-Host "##vso[task.setvariable variable=AzDOBuildId]$AzureDevOpsBuildId" + } + catch { + Write-Host $_ + Write-Host $_.Exception + Write-Host $_.ScriptStackTrace + exit 1 + } + env: + MAESTRO_API_TOKEN: $(MaestroApiAccessToken) + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToMaestroChannels: ${{ parameters.PromoteToChannelIds }} diff --git a/eng/common/templates-official/post-build/trigger-subscription.yml b/eng/common/templates-official/post-build/trigger-subscription.yml new file mode 100644 index 000000000..da669030d --- /dev/null +++ b/eng/common/templates-official/post-build/trigger-subscription.yml @@ -0,0 +1,13 @@ +parameters: + ChannelId: 0 + +steps: +- task: PowerShell@2 + displayName: Triggering subscriptions + inputs: + filePath: $(Build.SourcesDirectory)/eng/common/post-build/trigger-subscriptions.ps1 + arguments: -SourceRepo $(Build.Repository.Uri) + -ChannelId ${{ parameters.ChannelId }} + -MaestroApiAccessToken $(MaestroAccessToken) + -MaestroApiEndPoint $(MaestroApiEndPoint) + -MaestroApiVersion $(MaestroApiVersion) diff --git a/eng/common/templates/steps/promote-build.yml b/eng/common/templates-official/steps/add-build-to-channel.yml similarity index 68% rename from eng/common/templates/steps/promote-build.yml rename to eng/common/templates-official/steps/add-build-to-channel.yml index b90404435..f67a210d6 100644 --- a/eng/common/templates/steps/promote-build.yml +++ b/eng/common/templates-official/steps/add-build-to-channel.yml @@ -5,9 +5,9 @@ steps: - task: PowerShell@2 displayName: Add Build to Channel inputs: - filePath: $(Build.SourcesDirectory)/eng/common/post-build/promote-build.ps1 + filePath: $(Build.SourcesDirectory)/eng/common/post-build/add-build-to-channel.ps1 arguments: -BuildId $(BARBuildId) -ChannelId ${{ parameters.ChannelId }} -MaestroApiAccessToken $(MaestroApiAccessToken) -MaestroApiEndPoint $(MaestroApiEndPoint) - -MaestroApiVersion $(MaestroApiVersion) + -MaestroApiVersion $(MaestroApiVersion) diff --git a/eng/common/templates-official/steps/build-reason.yml b/eng/common/templates-official/steps/build-reason.yml new file mode 100644 index 000000000..eba58109b --- /dev/null +++ b/eng/common/templates-official/steps/build-reason.yml @@ -0,0 +1,12 @@ +# build-reason.yml +# Description: runs steps if build.reason condition is valid. conditions is a string of valid build reasons +# to include steps (',' separated). 
+parameters: + conditions: '' + steps: [] + +steps: + - ${{ if and( not(startsWith(parameters.conditions, 'not')), contains(parameters.conditions, variables['build.reason'])) }}: + - ${{ parameters.steps }} + - ${{ if and( startsWith(parameters.conditions, 'not'), not(contains(parameters.conditions, variables['build.reason']))) }}: + - ${{ parameters.steps }} diff --git a/eng/common/templates-official/steps/component-governance.yml b/eng/common/templates-official/steps/component-governance.yml new file mode 100644 index 000000000..cbba05967 --- /dev/null +++ b/eng/common/templates-official/steps/component-governance.yml @@ -0,0 +1,13 @@ +parameters: + disableComponentGovernance: false + componentGovernanceIgnoreDirectories: '' + +steps: +- ${{ if eq(parameters.disableComponentGovernance, 'true') }}: + - script: echo "##vso[task.setvariable variable=skipComponentGovernanceDetection]true" + displayName: Set skipComponentGovernanceDetection variable +- ${{ if ne(parameters.disableComponentGovernance, 'true') }}: + - task: ComponentGovernanceComponentDetection@0 + continueOnError: true + inputs: + ignoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }} \ No newline at end of file diff --git a/eng/common/templates-official/steps/enable-internal-runtimes.yml b/eng/common/templates-official/steps/enable-internal-runtimes.yml new file mode 100644 index 000000000..93a8394a6 --- /dev/null +++ b/eng/common/templates-official/steps/enable-internal-runtimes.yml @@ -0,0 +1,28 @@ +# Obtains internal runtime download credentials and populates the 'dotnetbuilds-internal-container-read-token-base64' +# variable with the base64-encoded SAS token, by default + +parameters: +- name: federatedServiceConnection + type: string + default: 'dotnetbuilds-internal-read' +- name: outputVariableName + type: string + default: 'dotnetbuilds-internal-container-read-token-base64' +- name: expiryInHours + type: number + default: 1 +- name: base64Encode + type: boolean + default: true + +steps: +- ${{ if ne(variables['System.TeamProject'], 'public') }}: + - template: /eng/common/templates-official/steps/get-delegation-sas.yml + parameters: + federatedServiceConnection: ${{ parameters.federatedServiceConnection }} + outputVariableName: ${{ parameters.outputVariableName }} + expiryInHours: ${{ parameters.expiryInHours }} + base64Encode: ${{ parameters.base64Encode }} + storageAccount: dotnetbuilds + container: internal + permissions: rl diff --git a/eng/common/templates-official/steps/execute-codeql.yml b/eng/common/templates-official/steps/execute-codeql.yml new file mode 100644 index 000000000..9b4a5ffa3 --- /dev/null +++ b/eng/common/templates-official/steps/execute-codeql.yml @@ -0,0 +1,32 @@ +parameters: + # Language that should be analyzed. Defaults to csharp + language: csharp + # Build Commands + buildCommands: '' + overrideParameters: '' # Optional: to override values for parameters. + additionalParameters: '' # Optional: parameters that need user specific values eg: '-SourceToolsList @("abc","def") -ArtifactToolsList @("ghi","jkl")' + # Optional: if specified, restore and use this version of Guardian instead of the default. + overrideGuardianVersion: '' + # Optional: if true, publish the '.gdn' folder as a pipeline artifact. This can help with in-depth + # diagnosis of problems with specific tool configurations. + publishGuardianDirectoryToPipeline: false + # The script to run to execute all SDL tools. Use this if you want to use a script to define SDL + # parameters rather than relying on YAML. 
It may be better to use a local script, because you can + # reproduce results locally without piecing together a command based on the YAML. + executeAllSdlToolsScript: 'eng/common/sdl/execute-all-sdl-tools.ps1' + # There is some sort of bug (has been reported) in Azure DevOps where if this parameter is named + # 'continueOnError', the parameter value is not correctly picked up. + # This can also be remedied by the caller (post-build.yml) if it does not use a nested parameter + # optional: determines whether to continue the build if the step errors; + sdlContinueOnError: false + +steps: +- template: /eng/common/templates-official/steps/execute-sdl.yml + parameters: + overrideGuardianVersion: ${{ parameters.overrideGuardianVersion }} + executeAllSdlToolsScript: ${{ parameters.executeAllSdlToolsScript }} + overrideParameters: ${{ parameters.overrideParameters }} + additionalParameters: '${{ parameters.additionalParameters }} + -CodeQLAdditionalRunConfigParams @("BuildCommands < ${{ parameters.buildCommands }}", "Language < ${{ parameters.language }}")' + publishGuardianDirectoryToPipeline: ${{ parameters.publishGuardianDirectoryToPipeline }} + sdlContinueOnError: ${{ parameters.sdlContinueOnError }} \ No newline at end of file diff --git a/eng/common/templates-official/steps/execute-sdl.yml b/eng/common/templates-official/steps/execute-sdl.yml new file mode 100644 index 000000000..301d5c591 --- /dev/null +++ b/eng/common/templates-official/steps/execute-sdl.yml @@ -0,0 +1,86 @@ +parameters: + overrideGuardianVersion: '' + executeAllSdlToolsScript: '' + overrideParameters: '' + additionalParameters: '' + publishGuardianDirectoryToPipeline: false + sdlContinueOnError: false + condition: '' + +steps: +- task: NuGetAuthenticate@1 + +- task: NuGetToolInstaller@1 + displayName: 'Install NuGet.exe' + +- ${{ if ne(parameters.overrideGuardianVersion, '') }}: + - pwsh: | + Set-Location -Path $(Build.SourcesDirectory)\eng\common\sdl + . .\sdl.ps1 + $guardianCliLocation = Install-Gdn -Path $(Build.SourcesDirectory)\.artifacts -Version ${{ parameters.overrideGuardianVersion }} + Write-Host "##vso[task.setvariable variable=GuardianCliLocation]$guardianCliLocation" + displayName: Install Guardian (Overridden) + +- ${{ if eq(parameters.overrideGuardianVersion, '') }}: + - pwsh: | + Set-Location -Path $(Build.SourcesDirectory)\eng\common\sdl + . .\sdl.ps1 + $guardianCliLocation = Install-Gdn -Path $(Build.SourcesDirectory)\.artifacts + Write-Host "##vso[task.setvariable variable=GuardianCliLocation]$guardianCliLocation" + displayName: Install Guardian + +- ${{ if ne(parameters.overrideParameters, '') }}: + - powershell: ${{ parameters.executeAllSdlToolsScript }} ${{ parameters.overrideParameters }} + displayName: Execute SDL (Overridden) + continueOnError: ${{ parameters.sdlContinueOnError }} + condition: ${{ parameters.condition }} + +- ${{ if eq(parameters.overrideParameters, '') }}: + - powershell: ${{ parameters.executeAllSdlToolsScript }} + -GuardianCliLocation $(GuardianCliLocation) + -NugetPackageDirectory $(Build.SourcesDirectory)\.packages + -AzureDevOpsAccessToken $(dn-bot-dotnet-build-rw-code-rw) + ${{ parameters.additionalParameters }} + displayName: Execute SDL + continueOnError: ${{ parameters.sdlContinueOnError }} + condition: ${{ parameters.condition }} + +- ${{ if ne(parameters.publishGuardianDirectoryToPipeline, 'false') }}: + # We want to publish the Guardian results and configuration for easy diagnosis. 
However, the + # '.gdn' dir is a mix of configuration, results, extracted dependencies, and Guardian default + # tooling files. Some of these files are large and aren't useful during an investigation, so + # exclude them by simply deleting them before publishing. (As of writing, there is no documented + # way to selectively exclude a dir from the pipeline artifact publish task.) + - task: DeleteFiles@1 + displayName: Delete Guardian dependencies to avoid uploading + inputs: + SourceFolder: $(Agent.BuildDirectory)/.gdn + Contents: | + c + i + condition: succeededOrFailed() + + - publish: $(Agent.BuildDirectory)/.gdn + artifact: GuardianConfiguration + displayName: Publish GuardianConfiguration + condition: succeededOrFailed() + + # Publish the SARIF files in a container named CodeAnalysisLogs to enable integration + # with the "SARIF SAST Scans Tab" Azure DevOps extension + - task: CopyFiles@2 + displayName: Copy SARIF files + inputs: + flattenFolders: true + sourceFolder: $(Agent.BuildDirectory)/.gdn/rc/ + contents: '**/*.sarif' + targetFolder: $(Build.SourcesDirectory)/CodeAnalysisLogs + condition: succeededOrFailed() + + # Use PublishBuildArtifacts because the SARIF extension only checks this case + # see microsoft/sarif-azuredevops-extension#4 + - task: PublishBuildArtifacts@1 + displayName: Publish SARIF files to CodeAnalysisLogs container + inputs: + pathToPublish: $(Build.SourcesDirectory)/CodeAnalysisLogs + artifactName: CodeAnalysisLogs + condition: succeededOrFailed() \ No newline at end of file diff --git a/eng/common/templates-official/steps/generate-sbom.yml b/eng/common/templates-official/steps/generate-sbom.yml new file mode 100644 index 000000000..1bf43bf80 --- /dev/null +++ b/eng/common/templates-official/steps/generate-sbom.yml @@ -0,0 +1,48 @@ +# BuildDropPath - The root folder of the drop directory for which the manifest file will be generated. +# PackageName - The name of the package this SBOM represents. +# PackageVersion - The version of the package this SBOM represents. +# ManifestDirPath - The path of the directory where the generated manifest files will be placed +# IgnoreDirectories - Directories to ignore for SBOM generation. This will be passed through to the CG component detector. 
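+#
+# A hedged usage sketch (the values below are illustrative assumptions, not defaults from this
+# repo): every parameter has a default, so a caller only overrides what differs.
+#
+#   steps:
+#   - template: /eng/common/templates-official/steps/generate-sbom.yml
+#     parameters:
+#       PackageVersion: 9.0.0                                       # hypothetical
+#       IgnoreDirectories: $(Build.SourcesDirectory)/artifacts/obj  # hypothetical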
+ +parameters: + PackageVersion: 8.0.0 + BuildDropPath: '$(Build.SourcesDirectory)/artifacts' + PackageName: '.NET' + ManifestDirPath: $(Build.ArtifactStagingDirectory)/sbom + IgnoreDirectories: '' + sbomContinueOnError: true + +steps: +- task: PowerShell@2 + displayName: Prep for SBOM generation in (Non-linux) + condition: or(eq(variables['Agent.Os'], 'Windows_NT'), eq(variables['Agent.Os'], 'Darwin')) + inputs: + filePath: ./eng/common/generate-sbom-prep.ps1 + arguments: ${{parameters.manifestDirPath}} + +# Chmodding is a workaround for https://github.com/dotnet/arcade/issues/8461 +- script: | + chmod +x ./eng/common/generate-sbom-prep.sh + ./eng/common/generate-sbom-prep.sh ${{parameters.manifestDirPath}} + displayName: Prep for SBOM generation in (Linux) + condition: eq(variables['Agent.Os'], 'Linux') + continueOnError: ${{ parameters.sbomContinueOnError }} + +- task: AzureArtifacts.manifest-generator-task.manifest-generator-task.ManifestGeneratorTask@0 + displayName: 'Generate SBOM manifest' + continueOnError: ${{ parameters.sbomContinueOnError }} + inputs: + PackageName: ${{ parameters.packageName }} + BuildDropPath: ${{ parameters.buildDropPath }} + PackageVersion: ${{ parameters.packageVersion }} + ManifestDirPath: ${{ parameters.manifestDirPath }} + ${{ if ne(parameters.IgnoreDirectories, '') }}: + AdditionalComponentDetectorArgs: '--IgnoreDirectories ${{ parameters.IgnoreDirectories }}' + +- task: 1ES.PublishPipelineArtifact@1 + displayName: Publish SBOM manifest + continueOnError: ${{parameters.sbomContinueOnError}} + inputs: + targetPath: '${{parameters.manifestDirPath}}' + artifactName: $(ARTIFACT_NAME) + diff --git a/eng/common/templates-official/steps/get-delegation-sas.yml b/eng/common/templates-official/steps/get-delegation-sas.yml new file mode 100644 index 000000000..c690cc0a0 --- /dev/null +++ b/eng/common/templates-official/steps/get-delegation-sas.yml @@ -0,0 +1,52 @@ +parameters: +- name: federatedServiceConnection + type: string +- name: outputVariableName + type: string +- name: expiryInHours + type: number + default: 1 +- name: base64Encode + type: boolean + default: false +- name: storageAccount + type: string +- name: container + type: string +- name: permissions + type: string + default: 'rl' + +steps: +- task: AzureCLI@2 + displayName: 'Generate delegation SAS Token for ${{ parameters.storageAccount }}/${{ parameters.container }}' + inputs: + azureSubscription: ${{ parameters.federatedServiceConnection }} + scriptType: 'pscore' + scriptLocation: 'inlineScript' + inlineScript: | + # Calculate the expiration of the SAS token and convert to UTC + $expiry = (Get-Date).AddHours(${{ parameters.expiryInHours }}).ToUniversalTime().ToString("yyyy-MM-ddTHH:mm:ssZ") + + # Temporarily work around a helix issue where SAS tokens with / in them will cause incorrect downloads + # of correlation payloads. https://github.com/dotnet/dnceng/issues/3484 + $sas = "" + do { + $sas = az storage container generate-sas --account-name ${{ parameters.storageAccount }} --name ${{ parameters.container }} --permissions ${{ parameters.permissions }} --expiry $expiry --auth-mode login --as-user -o tsv + if ($LASTEXITCODE -ne 0) { + Write-Error "Failed to generate SAS token." + exit 1 + } + } while($sas.IndexOf('/') -ne -1) + + if ($LASTEXITCODE -ne 0) { + Write-Error "Failed to generate SAS token." 
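+        # A nonzero az exit code at this point means no usable token was produced; the same
+        # failure is already caught inside the retry loop above, so this is a defensive re-check.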
+ exit 1 + } + + if ('${{ parameters.base64Encode }}' -eq 'true') { + $sas = [Convert]::ToBase64String([System.Text.Encoding]::UTF8.GetBytes($sas)) + } + + Write-Host "Setting '${{ parameters.outputVariableName }}' with the access token value" + Write-Host "##vso[task.setvariable variable=${{ parameters.outputVariableName }};issecret=true]$sas" diff --git a/eng/common/templates-official/steps/get-federated-access-token.yml b/eng/common/templates-official/steps/get-federated-access-token.yml new file mode 100644 index 000000000..55e33bd38 --- /dev/null +++ b/eng/common/templates-official/steps/get-federated-access-token.yml @@ -0,0 +1,40 @@ +parameters: +- name: federatedServiceConnection + type: string +- name: outputVariableName + type: string +- name: stepName + type: string + default: 'getFederatedAccessToken' +- name: condition + type: string + default: '' +# Resource to get a token for. Common values include: +# - '499b84ac-1321-427f-aa17-267ca6975798' for Azure DevOps +# - 'https://storage.azure.com/' for storage +# Defaults to Azure DevOps +- name: resource + type: string + default: '499b84ac-1321-427f-aa17-267ca6975798' +- name: isStepOutputVariable + type: boolean + default: false + +steps: +- task: AzureCLI@2 + displayName: 'Getting federated access token for feeds' + name: ${{ parameters.stepName }} + ${{ if ne(parameters.condition, '') }}: + condition: ${{ parameters.condition }} + inputs: + azureSubscription: ${{ parameters.federatedServiceConnection }} + scriptType: 'pscore' + scriptLocation: 'inlineScript' + inlineScript: | + $accessToken = az account get-access-token --query accessToken --resource ${{ parameters.resource }} --output tsv + if ($LASTEXITCODE -ne 0) { + Write-Error "Failed to get access token for resource '${{ parameters.resource }}'" + exit 1 + } + Write-Host "Setting '${{ parameters.outputVariableName }}' with the access token value" + Write-Host "##vso[task.setvariable variable=${{ parameters.outputVariableName }};issecret=true;isOutput=${{ parameters.isStepOutputVariable }}]$accessToken" \ No newline at end of file diff --git a/eng/common/templates-official/steps/publish-logs.yml b/eng/common/templates-official/steps/publish-logs.yml new file mode 100644 index 000000000..04012fed1 --- /dev/null +++ b/eng/common/templates-official/steps/publish-logs.yml @@ -0,0 +1,23 @@ +parameters: + StageLabel: '' + JobLabel: '' + +steps: +- task: Powershell@2 + displayName: Prepare Binlogs to Upload + inputs: + targetType: inline + script: | + New-Item -ItemType Directory $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/ + Move-Item -Path $(Build.SourcesDirectory)/artifacts/log/Debug/* $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/ + continueOnError: true + condition: always() + +- task: 1ES.PublishBuildArtifacts@1 + displayName: Publish Logs + inputs: + PathtoPublish: '$(Build.SourcesDirectory)/PostBuildLogs' + PublishLocation: Container + ArtifactName: PostBuildLogs + continueOnError: true + condition: always() diff --git a/eng/common/templates-official/steps/retain-build.yml b/eng/common/templates-official/steps/retain-build.yml new file mode 100644 index 000000000..83d97a26a --- /dev/null +++ b/eng/common/templates-official/steps/retain-build.yml @@ -0,0 +1,28 @@ +parameters: + # Optional azure devops PAT with build execute permissions for the build's organization, + # only needed if the build that should be retained ran on a different organization than + # the pipeline where 
this template is executing from
+  Token: ''
+  # Optional BuildId to retain, defaults to the current running build
+  BuildId: ''
+  # Azure devops Organization URI for the build in the https://dev.azure.com/<organization> format.
+  # Defaults to the organization the current pipeline is running on
+  AzdoOrgUri: '$(System.CollectionUri)'
+  # Azure devops project for the build. Defaults to the project the current pipeline is running on
+  AzdoProject: '$(System.TeamProject)'
+
+steps:
+  - task: powershell@2
+    inputs:
+      targetType: 'filePath'
+      filePath: eng/common/retain-build.ps1
+      pwsh: true
+      arguments: >
+        -AzdoOrgUri: ${{parameters.AzdoOrgUri}}
+        -AzdoProject ${{parameters.AzdoProject}}
+        -Token ${{coalesce(parameters.Token, '$env:SYSTEM_ACCESSTOKEN') }}
+        -BuildId ${{coalesce(parameters.BuildId, '$env:BUILD_ID')}}
+    displayName: Enable permanent build retention
+    env:
+      SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+      BUILD_ID: $(Build.BuildId)
\ No newline at end of file
diff --git a/eng/common/templates/steps/perf-send-to-helix.yml b/eng/common/templates-official/steps/send-to-helix.yml
similarity index 57%
rename from eng/common/templates/steps/perf-send-to-helix.yml
rename to eng/common/templates-official/steps/send-to-helix.yml
index b3ea9acf1..3eb7e2d5f 100644
--- a/eng/common/templates/steps/perf-send-to-helix.yml
+++ b/eng/common/templates-official/steps/send-to-helix.yml
@@ -3,63 +3,88 @@ parameters:
   HelixSource: 'pr/default' # required -- sources must start with pr/, official/, prodcon/, or agent/
   HelixType: 'tests/default/' # required -- Helix telemetry which identifies what type of data this is; should include "test" for clarity and must end in '/'
   HelixBuild: $(Build.BuildNumber) # required -- the build number Helix will use to identify this -- automatically set to the AzDO build number
-  HelixTargetQueues: '' # required -- semicolon delimited list of Helix queues to test on; see https://helix.dot.net/ for a list of queues
+  HelixTargetQueues: '' # required -- semicolon-delimited list of Helix queues to test on; see https://helix.dot.net/ for a list of queues
   HelixAccessToken: '' # required -- access token to make Helix API requests; should be provided by the appropriate variable group
+  HelixConfiguration: '' # optional -- additional property attached to a job
   HelixPreCommands: '' # optional -- commands to run before Helix work item execution
   HelixPostCommands: '' # optional -- commands to run after Helix work item execution
   WorkItemDirectory: '' # optional -- a payload directory to zip up and send to Helix; requires WorkItemCommand; incompatible with XUnitProjects
+  WorkItemCommand: '' # optional -- a command to execute on the payload; requires WorkItemDirectory; incompatible with XUnitProjects
+  WorkItemTimeout: '' # optional -- a timeout in TimeSpan.Parse-ready value (e.g. 00:02:00) for the work item command; requires WorkItemDirectory; incompatible with XUnitProjects
   CorrelationPayloadDirectory: '' # optional -- a directory to zip up and send to Helix as a correlation payload
+  XUnitProjects: '' # optional -- semicolon-delimited list of XUnitProjects to parse and send to Helix; requires XUnitRuntimeTargetFramework, XUnitPublishTargetFramework, XUnitRunnerVersion, and IncludeDotNetCli=true
+  XUnitWorkItemTimeout: '' # optional -- the workitem timeout in seconds for all workitems created from the xUnit projects specified by XUnitProjects
+  XUnitPublishTargetFramework: '' # optional -- framework to use to publish your xUnit projects
+  XUnitRuntimeTargetFramework: '' # optional -- framework to use for the xUnit console runner
+  XUnitRunnerVersion: '' # optional -- version of the xUnit nuget package you wish to use on Helix; required for XUnitProjects
   IncludeDotNetCli: false # optional -- true will download a version of the .NET CLI onto the Helix machine as a correlation payload; requires DotNetCliPackageType and DotNetCliVersion
-  DotNetCliPackageType: '' # optional -- either 'sdk' or 'runtime'; determines whether the sdk or runtime will be sent to Helix; see https://raw.githubusercontent.com/dotnet/core/master/release-notes/releases.json
-  DotNetCliVersion: '' # optional -- version of the CLI to send to Helix; based on this: https://raw.githubusercontent.com/dotnet/core/master/release-notes/releases.json
-  EnableXUnitReporter: false # optional -- true enables XUnit result reporting to Mission Control
+  DotNetCliPackageType: '' # optional -- either 'sdk', 'runtime' or 'aspnetcore-runtime'; determines whether the sdk or runtime will be sent to Helix; see https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json
+  DotNetCliVersion: '' # optional -- version of the CLI to send to Helix; based on this: https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json
   WaitForWorkItemCompletion: true # optional -- true will make the task wait until work items have been completed and fail the build if work items fail. False is "fire and forget."
+ IsExternal: false # [DEPRECATED] -- doesn't do anything, jobs are external if HelixAccessToken is empty and Creator is set + HelixBaseUri: 'https://helix.dot.net/' # optional -- sets the Helix API base URI (allows targeting https://helix.int-dot.net ) Creator: '' # optional -- if the build is external, use this to specify who is sending the job - DisplayNamePrefix: 'Send job to Helix' # optional -- rename the beginning of the displayName of the steps in AzDO + DisplayNamePrefix: 'Run Tests' # optional -- rename the beginning of the displayName of the steps in AzDO condition: succeeded() # optional -- condition for step to execute; defaults to succeeded() continueOnError: false # optional -- determines whether to continue the build if the step errors; defaults to false steps: - - powershell: $(Build.SourcesDirectory)\eng\common\msbuild.ps1 $(Build.SourcesDirectory)\eng\common\performance\perfhelixpublish.proj /restore /t:Test /bl:$(Build.SourcesDirectory)\artifacts\log\$env:BuildConfig\SendToHelix.binlog + - powershell: 'powershell "$env:BUILD_SOURCESDIRECTORY\eng\common\msbuild.ps1 $env:BUILD_SOURCESDIRECTORY\eng\common\helixpublish.proj /restore /p:TreatWarningsAsErrors=false /t:Test /bl:$env:BUILD_SOURCESDIRECTORY\artifacts\log\$env:BuildConfig\SendToHelix.binlog"' displayName: ${{ parameters.DisplayNamePrefix }} (Windows) env: BuildConfig: $(_BuildConfig) HelixSource: ${{ parameters.HelixSource }} HelixType: ${{ parameters.HelixType }} HelixBuild: ${{ parameters.HelixBuild }} + HelixConfiguration: ${{ parameters.HelixConfiguration }} HelixTargetQueues: ${{ parameters.HelixTargetQueues }} HelixAccessToken: ${{ parameters.HelixAccessToken }} HelixPreCommands: ${{ parameters.HelixPreCommands }} HelixPostCommands: ${{ parameters.HelixPostCommands }} WorkItemDirectory: ${{ parameters.WorkItemDirectory }} + WorkItemCommand: ${{ parameters.WorkItemCommand }} + WorkItemTimeout: ${{ parameters.WorkItemTimeout }} CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }} + XUnitProjects: ${{ parameters.XUnitProjects }} + XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }} + XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }} + XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }} + XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }} IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }} DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }} DotNetCliVersion: ${{ parameters.DotNetCliVersion }} - EnableXUnitReporter: ${{ parameters.EnableXUnitReporter }} WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }} + HelixBaseUri: ${{ parameters.HelixBaseUri }} Creator: ${{ parameters.Creator }} SYSTEM_ACCESSTOKEN: $(System.AccessToken) condition: and(${{ parameters.condition }}, eq(variables['Agent.Os'], 'Windows_NT')) continueOnError: ${{ parameters.continueOnError }} - - script: $BUILD_SOURCESDIRECTORY/eng/common/msbuild.sh $BUILD_SOURCESDIRECTORY/eng/common/performance/perfhelixpublish.proj /restore /t:Test /bl:$BUILD_SOURCESDIRECTORY/artifacts/log/$BuildConfig/SendToHelix.binlog + - script: $BUILD_SOURCESDIRECTORY/eng/common/msbuild.sh $BUILD_SOURCESDIRECTORY/eng/common/helixpublish.proj /restore /p:TreatWarningsAsErrors=false /t:Test /bl:$BUILD_SOURCESDIRECTORY/artifacts/log/$BuildConfig/SendToHelix.binlog displayName: ${{ parameters.DisplayNamePrefix }} (Unix) env: BuildConfig: $(_BuildConfig) HelixSource: ${{ parameters.HelixSource }} HelixType: ${{ parameters.HelixType }} HelixBuild: ${{ 
parameters.HelixBuild }}
+      HelixConfiguration: ${{ parameters.HelixConfiguration }}
       HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
       HelixAccessToken: ${{ parameters.HelixAccessToken }}
       HelixPreCommands: ${{ parameters.HelixPreCommands }}
       HelixPostCommands: ${{ parameters.HelixPostCommands }}
       WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
+      WorkItemCommand: ${{ parameters.WorkItemCommand }}
+      WorkItemTimeout: ${{ parameters.WorkItemTimeout }}
       CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
+      XUnitProjects: ${{ parameters.XUnitProjects }}
+      XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }}
+      XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }}
+      XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }}
+      XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }}
       IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
       DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
       DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
-      EnableXUnitReporter: ${{ parameters.EnableXUnitReporter }}
       WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
+      HelixBaseUri: ${{ parameters.HelixBaseUri }}
       Creator: ${{ parameters.Creator }}
       SYSTEM_ACCESSTOKEN: $(System.AccessToken)
     condition: and(${{ parameters.condition }}, ne(variables['Agent.Os'], 'Windows_NT'))
diff --git a/eng/common/templates-official/steps/source-build.yml b/eng/common/templates-official/steps/source-build.yml
new file mode 100644
index 000000000..829f17c34
--- /dev/null
+++ b/eng/common/templates-official/steps/source-build.yml
@@ -0,0 +1,129 @@
+parameters:
+  # This template adds arcade-powered source-build to CI.
+
+  # This is a 'steps' template, and is intended for advanced scenarios where the existing build
+  # infra has a careful build methodology that must be followed. For example, a repo
+  # (dotnet/runtime) might choose to clone the GitHub repo only once and store it as a pipeline
+  # artifact for all subsequent jobs to use, to reduce dependence on a strong network connection to
+  # GitHub. Using this steps template leaves room for that infra to be included.
+
+  # Defines the platform on which to run the steps. See 'eng/common/templates-official/job/source-build.yml'
+  # for details. The entire object is described in the 'job' template for simplicity, even though
+  # the usage of the properties on this object is split between the 'job' and 'steps' templates.
+  platform: {}
+
+steps:
+# Build. Keep it self-contained for simple reusability. (No source-build-specific job variables.)
+- script: |
+    set -x
+    df -h
+
+    # If building on the internal project, the artifact feeds variable may be available (usually only if needed)
+    # In that case, call the feed setup script to add internal feeds corresponding to public ones.
+    # In addition, add an msbuild argument to copy the WIP from the repo to the target build location.
+    # This is because SetupNuGetSources.sh will alter the current NuGet.config file, and we need to preserve those
+    # changes.
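+    # (Hedged note: the checks of the form [ '$(var)' != '$''(var)' ] below test whether
+    # AzDO substituted a real value; if the variable is undefined the literal text survives,
+    # and the '$''(' spelling keeps AzDO from substituting inside the right-hand side.)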
+ internalRestoreArgs= + if [ '$(dn-bot-dnceng-artifact-feeds-rw)' != '$''(dn-bot-dnceng-artifact-feeds-rw)' ]; then + # Temporarily work around https://github.com/dotnet/arcade/issues/7709 + chmod +x $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh + $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh $(Build.SourcesDirectory)/NuGet.config $(dn-bot-dnceng-artifact-feeds-rw) + internalRestoreArgs='/p:CopyWipIntoInnerSourceBuildRepo=true' + + # The 'Copy WIP' feature of source build uses git stash to apply changes from the original repo. + # This only works if there is a username/email configured, which won't be the case in most CI runs. + git config --get user.email + if [ $? -ne 0 ]; then + git config user.email dn-bot@microsoft.com + git config user.name dn-bot + fi + fi + + # If building on the internal project, the internal storage variable may be available (usually only if needed) + # In that case, add variables to allow the download of internal runtimes if the specified versions are not found + # in the default public locations. + internalRuntimeDownloadArgs= + if [ '$(dotnetbuilds-internal-container-read-token-base64)' != '$''(dotnetbuilds-internal-container-read-token-base64)' ]; then + internalRuntimeDownloadArgs='/p:DotNetRuntimeSourceFeed=https://dotnetbuilds.blob.core.windows.net/internal /p:DotNetRuntimeSourceFeedKey=$(dotnetbuilds-internal-container-read-token-base64) --runtimesourcefeed https://dotnetbuilds.blob.core.windows.net/internal --runtimesourcefeedkey $(dotnetbuilds-internal-container-read-token-base64)' + fi + + buildConfig=Release + # Check if AzDO substitutes in a build config from a variable, and use it if so. + if [ '$(_BuildConfig)' != '$''(_BuildConfig)' ]; then + buildConfig='$(_BuildConfig)' + fi + + officialBuildArgs= + if [ '${{ and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}' = 'True' ]; then + officialBuildArgs='/p:DotNetPublishUsingPipelines=true /p:OfficialBuildId=$(BUILD.BUILDNUMBER)' + fi + + targetRidArgs= + if [ '${{ parameters.platform.targetRID }}' != '' ]; then + targetRidArgs='/p:TargetRid=${{ parameters.platform.targetRID }}' + fi + + runtimeOsArgs= + if [ '${{ parameters.platform.runtimeOS }}' != '' ]; then + runtimeOsArgs='/p:RuntimeOS=${{ parameters.platform.runtimeOS }}' + fi + + baseOsArgs= + if [ '${{ parameters.platform.baseOS }}' != '' ]; then + baseOsArgs='/p:BaseOS=${{ parameters.platform.baseOS }}' + fi + + publishArgs= + if [ '${{ parameters.platform.skipPublishValidation }}' != 'true' ]; then + publishArgs='--publish' + fi + + assetManifestFileName=SourceBuild_RidSpecific.xml + if [ '${{ parameters.platform.name }}' != '' ]; then + assetManifestFileName=SourceBuild_${{ parameters.platform.name }}.xml + fi + + ${{ coalesce(parameters.platform.buildScript, './build.sh') }} --ci \ + --configuration $buildConfig \ + --restore --build --pack $publishArgs -bl \ + $officialBuildArgs \ + $internalRuntimeDownloadArgs \ + $internalRestoreArgs \ + $targetRidArgs \ + $runtimeOsArgs \ + $baseOsArgs \ + /p:SourceBuildNonPortable=${{ parameters.platform.nonPortable }} \ + /p:ArcadeBuildFromSource=true \ + /p:AssetManifestFileName=$assetManifestFileName + displayName: Build + +# Upload build logs for diagnosis. 
+- task: CopyFiles@2
+  displayName: Prepare BuildLogs staging directory
+  inputs:
+    SourceFolder: '$(Build.SourcesDirectory)'
+    Contents: |
+      **/*.log
+      **/*.binlog
+      artifacts/source-build/self/prebuilt-report/**
+    TargetFolder: '$(Build.StagingDirectory)/BuildLogs'
+    CleanTargetFolder: true
+  continueOnError: true
+  condition: succeededOrFailed()
+
+- task: 1ES.PublishPipelineArtifact@1
+  displayName: Publish BuildLogs
+  inputs:
+    targetPath: '$(Build.StagingDirectory)/BuildLogs'
+    artifactName: BuildLogs_SourceBuild_${{ parameters.platform.name }}_Attempt$(System.JobAttempt)
+  continueOnError: true
+  condition: succeededOrFailed()
+
+# Manually inject component detection so that we can ignore the source build upstream cache, which contains
+# a nupkg cache of input packages (a local feed).
+# This path must match the upstream cache path in property 'CurrentRepoSourceBuiltNupkgCacheDir'
+# in src\Microsoft.DotNet.Arcade.Sdk\tools\SourceBuild\SourceBuildArcade.targets
+- task: ComponentGovernanceComponentDetection@0
+  displayName: Component Detection (Exclude upstream cache)
+  inputs:
+    ignoreDirectories: '$(Build.SourcesDirectory)/artifacts/source-build/self/src/artifacts/obj/source-built-upstream-cache'
diff --git a/eng/common/templates-official/variables/pool-providers.yml b/eng/common/templates-official/variables/pool-providers.yml
new file mode 100644
index 000000000..1f308b24e
--- /dev/null
+++ b/eng/common/templates-official/variables/pool-providers.yml
@@ -0,0 +1,45 @@
+# Select a pool provider based off branch name. Anything with branch name containing 'release' must go into an -Svc pool,
+# otherwise it should go into the "normal" pools. This separates out the queueing and billing of released branches.
+
+# Motivation:
+#   Once a given branch of a repository's output has been officially "shipped" once, it is then considered to be COGS
+#   (Cost of goods sold) and should be moved to a servicing pool provider. This allows both separation of queueing
+#   (allowing release builds and main PR builds to not interfere with each other) and billing (required for COGS).
+#   Additionally, the pool provider name itself may be subject to change when the .NET Core Engineering Services
+#   team needs to move resources around and create new and potentially differently-named pools. Using this template
+#   file from an Arcade-ified repo helps guard against both having to update one's release/* branches and renaming.
+
+# How to use:
+#   This yaml assumes your shipped product branches use the naming convention "release/..." (which many do).
+#   If we find alternate naming conventions in broad usage it can be added to the condition below.
+#
+#   First, import the template in an arcade-ified repo to pick up the variables, e.g.:
+#
+#     variables:
+#     - template: /eng/common/templates-official/variables/pool-providers.yml
+#
+#   ... then anywhere specifying the pool provider use the runtime variables,
+#   $(DncEngInternalBuildPool)
+#
+#   pool:
+#     name: $(DncEngInternalBuildPool)
+#     image: 1es-windows-2022
+
+variables:
+  # Coalesce the target and source branches so we know when a PR targets a release branch
+  # If these variables are somehow missing, fall back to main (tends to have more capacity)
+
+  # Any new -Svc alternative pools should have variables added here to allow for splitting work
+
+  - name: DncEngInternalBuildPool
+    value: $[
+      replace(
+        replace(
+          eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'),
+          True,
+          'NetCore1ESPool-Svc-Internal'
+        ),
+        False,
+        'NetCore1ESPool-Internal'
+      )
+    ]
\ No newline at end of file
diff --git a/eng/common/templates-official/variables/sdl-variables.yml b/eng/common/templates-official/variables/sdl-variables.yml
new file mode 100644
index 000000000..dbdd66d4a
--- /dev/null
+++ b/eng/common/templates-official/variables/sdl-variables.yml
@@ -0,0 +1,7 @@
+variables:
+# The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in
+# sync with the packages.config file.
+- name: DefaultGuardianVersion
+  value: 0.109.0
+- name: GuardianPackagesConfigFile
+  value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config
\ No newline at end of file
diff --git a/eng/common/templates/job/execute-sdl.yml b/eng/common/templates/job/execute-sdl.yml
index bf09d2511..7870f93bc 100644
--- a/eng/common/templates/job/execute-sdl.yml
+++ b/eng/common/templates/job/execute-sdl.yml
@@ -1,73 +1,139 @@
 parameters:
+  enable: 'false' # Whether the SDL validation job should execute or not
   overrideParameters: '' # Optional: to override values for parameters.
   additionalParameters: '' # Optional: parameters that need user specific values eg: '-SourceToolsList @("abc","def") -ArtifactToolsList @("ghi","jkl")'
+  # Optional: if specified, restore and use this version of Guardian instead of the default.
+  overrideGuardianVersion: ''
+  # Optional: if true, publish the '.gdn' folder as a pipeline artifact. This can help with in-depth
+  # diagnosis of problems with specific tool configurations.
+  publishGuardianDirectoryToPipeline: false
+  # The script to run to execute all SDL tools. Use this if you want to use a script to define SDL
+  # parameters rather than relying on YAML. It may be better to use a local script, because you can
+  # reproduce results locally without piecing together a command based on the YAML.
+  executeAllSdlToolsScript: 'eng/common/sdl/execute-all-sdl-tools.ps1'
   # There is some sort of bug (has been reported) in Azure DevOps where if this parameter is named
   # 'continueOnError', the parameter value is not correctly picked up.
   # This can also be remedied by the caller (post-build.yml) if it does not use a nested parameter
   sdlContinueOnError: false # optional: determines whether to continue the build if the step errors;
+  # optional: determines if build artifacts should be downloaded.
+  downloadArtifacts: true
+  # optional: determines if this job should search the directory of downloaded artifacts for
+  # 'tar.gz' and 'zip' archive files and extract them before running SDL validation tasks.
+ extractArchiveArtifacts: false dependsOn: '' # Optional: dependencies of the job artifactNames: '' # Optional: patterns supplied to DownloadBuildArtifacts # Usage: # artifactNames: # - 'BlobArtifacts' # - 'Artifacts_Windows_NT_Release' + # Optional: download a list of pipeline artifacts. 'downloadArtifacts' controls build artifacts, + # not pipeline artifacts, so doesn't affect the use of this parameter. + pipelineArtifactNames: [] jobs: - job: Run_SDL dependsOn: ${{ parameters.dependsOn }} displayName: Run SDL tool + condition: and(succeededOrFailed(), eq( ${{ parameters.enable }}, 'true')) variables: - group: DotNet-VSTS-Bot + - name: AzDOProjectName + value: ${{ parameters.AzDOProjectName }} + - name: AzDOPipelineId + value: ${{ parameters.AzDOPipelineId }} + - name: AzDOBuildId + value: ${{ parameters.AzDOBuildId }} + - template: /eng/common/templates/variables/sdl-variables.yml + - name: GuardianVersion + value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }} + - template: /eng/common/templates/variables/pool-providers.yml pool: - name: Hosted VS2017 + # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com) + ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: + name: VSEngSS-MicroBuild2022-1ES + demands: Cmd + # If it's not devdiv, it's dnceng + ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}: + name: $(DncEngInternalBuildPool) + demands: ImageOverride -equals windows.vs2019.amd64 steps: - checkout: self clean: true - - ${{ if ne(parameters.artifactNames, '') }}: - - ${{ each artifactName in parameters.artifactNames }}: + + # If the template caller didn't provide an AzDO parameter, set them all up as Maestro vars. + - ${{ if not(and(parameters.AzDOProjectName, parameters.AzDOPipelineId, parameters.AzDOBuildId)) }}: + - template: /eng/common/templates/post-build/setup-maestro-vars.yml + + - ${{ if ne(parameters.downloadArtifacts, 'false')}}: + - ${{ if ne(parameters.artifactNames, '') }}: + - ${{ each artifactName in parameters.artifactNames }}: + - task: DownloadBuildArtifacts@0 + displayName: Download Build Artifacts + inputs: + buildType: specific + buildVersionToDownload: specific + project: $(AzDOProjectName) + pipeline: $(AzDOPipelineId) + buildId: $(AzDOBuildId) + artifactName: ${{ artifactName }} + downloadPath: $(Build.ArtifactStagingDirectory)\artifacts + checkDownloadedFiles: true + - ${{ if eq(parameters.artifactNames, '') }}: - task: DownloadBuildArtifacts@0 displayName: Download Build Artifacts inputs: - buildType: current - artifactName: ${{ artifactName }} + buildType: specific + buildVersionToDownload: specific + project: $(AzDOProjectName) + pipeline: $(AzDOPipelineId) + buildId: $(AzDOBuildId) + downloadType: specific files + itemPattern: "**" downloadPath: $(Build.ArtifactStagingDirectory)\artifacts - - ${{ if eq(parameters.artifactNames, '') }}: - - task: DownloadBuildArtifacts@0 - displayName: Download Build Artifacts + checkDownloadedFiles: true + + - ${{ each artifactName in parameters.pipelineArtifactNames }}: + - task: DownloadPipelineArtifact@2 + displayName: Download Pipeline Artifacts inputs: - buildType: current - downloadType: specific files - itemPattern: "**" + buildType: specific + buildVersionToDownload: specific + project: $(AzDOProjectName) + pipeline: $(AzDOPipelineId) + buildId: $(AzDOBuildId) + artifactName: ${{ artifactName }} downloadPath: $(Build.ArtifactStagingDirectory)\artifacts + checkDownloadedFiles: true + + - powershell: 
eng/common/sdl/trim-assets-version.ps1 + -InputPath $(Build.ArtifactStagingDirectory)\artifacts + displayName: Trim the version from the NuGet packages + continueOnError: ${{ parameters.sdlContinueOnError }} + - powershell: eng/common/sdl/extract-artifact-packages.ps1 - -InputPath $(Build.SourcesDirectory)\artifacts\BlobArtifacts - -ExtractPath $(Build.SourcesDirectory)\artifacts\BlobArtifacts + -InputPath $(Build.ArtifactStagingDirectory)\artifacts\BlobArtifacts + -ExtractPath $(Build.ArtifactStagingDirectory)\artifacts\BlobArtifacts displayName: Extract Blob Artifacts continueOnError: ${{ parameters.sdlContinueOnError }} + - powershell: eng/common/sdl/extract-artifact-packages.ps1 - -InputPath $(Build.SourcesDirectory)\artifacts\PackageArtifacts - -ExtractPath $(Build.SourcesDirectory)\artifacts\PackageArtifacts + -InputPath $(Build.ArtifactStagingDirectory)\artifacts\PackageArtifacts + -ExtractPath $(Build.ArtifactStagingDirectory)\artifacts\PackageArtifacts displayName: Extract Package Artifacts continueOnError: ${{ parameters.sdlContinueOnError }} - - task: NuGetToolInstaller@1 - displayName: 'Install NuGet.exe' - - task: NuGetCommand@2 - displayName: 'Install Guardian' - inputs: - restoreSolution: $(Build.SourcesDirectory)\eng\common\sdl\packages.config - feedsToUse: config - nugetConfigPath: $(Build.SourcesDirectory)\eng\common\sdl\NuGet.config - externalFeedCredentials: GuardianConnect - restoreDirectory: $(Build.SourcesDirectory)\.packages - - ${{ if ne(parameters.overrideParameters, '') }}: - - powershell: eng/common/sdl/execute-all-sdl-tools.ps1 ${{ parameters.overrideParameters }} - displayName: Execute SDL - continueOnError: ${{ parameters.sdlContinueOnError }} - - ${{ if eq(parameters.overrideParameters, '') }}: - - powershell: eng/common/sdl/execute-all-sdl-tools.ps1 - -GuardianPackageName Microsoft.Guardian.Cli.win10-x64.0.20.1 - -NugetPackageDirectory $(Build.SourcesDirectory)\.packages - -AzureDevOpsAccessToken $(dn-bot-dotnet-build-rw-code-rw) - ${{ parameters.additionalParameters }} - displayName: Execute SDL + + - ${{ if ne(parameters.extractArchiveArtifacts, 'false') }}: + - powershell: eng/common/sdl/extract-artifact-archives.ps1 + -InputPath $(Build.ArtifactStagingDirectory)\artifacts + -ExtractPath $(Build.ArtifactStagingDirectory)\artifacts + displayName: Extract Archive Artifacts continueOnError: ${{ parameters.sdlContinueOnError }} + + - template: /eng/common/templates/steps/execute-sdl.yml + parameters: + overrideGuardianVersion: ${{ parameters.overrideGuardianVersion }} + executeAllSdlToolsScript: ${{ parameters.executeAllSdlToolsScript }} + overrideParameters: ${{ parameters.overrideParameters }} + additionalParameters: ${{ parameters.additionalParameters }} + publishGuardianDirectoryToPipeline: ${{ parameters.publishGuardianDirectoryToPipeline }} + sdlContinueOnError: ${{ parameters.sdlContinueOnError }} diff --git a/eng/common/templates/job/generate-graph-files.yml b/eng/common/templates/job/generate-graph-files.yml deleted file mode 100644 index e54ce956f..000000000 --- a/eng/common/templates/job/generate-graph-files.yml +++ /dev/null @@ -1,48 +0,0 @@ -parameters: - # Optional: dependencies of the job - dependsOn: '' - - # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool - pool: {} - - # Optional: Include toolset dependencies in the generated graph files - includeToolset: false - -jobs: -- job: Generate_Graph_Files - - dependsOn: ${{ parameters.dependsOn }} - - displayName: 
Generate Graph Files - - pool: ${{ parameters.pool }} - - variables: - # Publish-Build-Assets provides: MaestroAccessToken, BotAccount-dotnet-maestro-bot-PAT - # DotNet-AllOrgs-Darc-Pats provides: dn-bot-devdiv-dnceng-rw-code-pat - - group: Publish-Build-Assets - - group: DotNet-AllOrgs-Darc-Pats - - name: _GraphArguments - value: -gitHubPat $(BotAccount-dotnet-maestro-bot-PAT) - -azdoPat $(dn-bot-devdiv-dnceng-rw-code-pat) - -barToken $(MaestroAccessToken) - -outputFolder '$(Build.StagingDirectory)/GraphFiles/' - - ${{ if ne(parameters.includeToolset, 'false') }}: - - name: _GraphArguments - value: ${{ variables._GraphArguments }} -includeToolset - - steps: - - task: PowerShell@2 - displayName: Generate Graph Files - inputs: - filePath: eng\common\generate-graph-files.ps1 - arguments: $(_GraphArguments) - continueOnError: true - - task: PublishBuildArtifacts@1 - displayName: Publish Graph to Artifacts - inputs: - PathtoPublish: '$(Build.StagingDirectory)/GraphFiles' - PublishLocation: Container - ArtifactName: GraphFiles - continueOnError: true - condition: always() diff --git a/eng/common/templates/job/job.yml b/eng/common/templates/job/job.yml index ffda80a19..8ec5c4f2d 100644 --- a/eng/common/templates/job/job.yml +++ b/eng/common/templates/job/job.yml @@ -1,66 +1,43 @@ +# Internal resources (telemetry, microbuild) can only be accessed from non-public projects, +# and some (Microbuild) should only be applied to non-PR cases for internal builds. + parameters: # Job schema parameters - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job cancelTimeoutInMinutes: '' - condition: '' - - continueOnError: false - container: '' - + continueOnError: false dependsOn: '' - displayName: '' - - steps: [] - pool: '' - + steps: [] strategy: '' - timeoutInMinutes: '' - variables: [] - workspace: '' + templateContext: '' - # Job base template specific parameters - # Optional: Enable installing Microbuild plugin - # if 'true', these "variables" must be specified in the variables object or as part of the queue matrix - # _TeamName - the name of your team - # _SignType - 'test' or 'real' +# Job base template specific parameters + # See schema documentation - https://github.com/dotnet/arcade/blob/master/Documentation/AzureDevOps/TemplateSchema.md + artifacts: '' enableMicrobuild: false - - # Optional: Include PublishBuildArtifacts task enablePublishBuildArtifacts: false - - # Optional: Enable publishing to the build asset registry enablePublishBuildAssets: false - - # Optional: Prevent gather/push manifest from executing when using publishing pipelines - enablePublishUsingPipelines: false - - # Optional: Include PublishTestResults task enablePublishTestResults: false - - # Optional: enable sending telemetry - enableTelemetry: false - - # Optional: define the helix repo for telemetry (example: 'dotnet/arcade') - helixRepo: '' - - # Optional: define the helix type for telemetry (example: 'build/product/') - helixType: '' - - # Required: name of the job + enablePublishUsingPipelines: false + enableBuildRetry: false + disableComponentGovernance: '' + componentGovernanceIgnoreDirectories: '' + mergeTestResults: false + testRunTitle: '' + testResultsFormat: '' name: '' - - # Optional: should run as a public build even in the internal project - # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects. 
+ preSteps: [] runAsPublic: false - -# Internal resources (telemetry, microbuild) can only be accessed from non-public projects, -# and some (Microbuild) should only be applied to non-PR cases for internal builds. +# Sbom related params + enableSbom: true + PackageVersion: 7.0.0 + BuildDropPath: '$(Build.SourcesDirectory)/artifacts' jobs: - job: ${{ parameters.name }} @@ -92,10 +69,20 @@ jobs: ${{ if ne(parameters.timeoutInMinutes, '') }}: timeoutInMinutes: ${{ parameters.timeoutInMinutes }} + ${{ if ne(parameters.templateContext, '') }}: + templateContext: ${{ parameters.templateContext }} + variables: - - ${{ if eq(parameters.enableTelemetry, 'true') }}: + - ${{ if ne(parameters.enableTelemetry, 'false') }}: - name: DOTNET_CLI_TELEMETRY_PROFILE value: '$(Build.Repository.Uri)' + - ${{ if eq(parameters.enableRichCodeNavigation, 'true') }}: + - name: EnableRichCodeNavigation + value: 'true' + # Retry signature validation up to three times, waiting 2 seconds between attempts. + # See https://learn.microsoft.com/en-us/nuget/reference/errors-and-warnings/nu3028#retry-untrusted-root-failures + - name: NUGET_EXPERIMENTAL_CHAIN_BUILD_RETRY_POLICY + value: 3,2000 - ${{ each variable in parameters.variables }}: # handle name-value variable syntax # example: @@ -104,15 +91,25 @@ jobs: - ${{ if ne(variable.name, '') }}: - name: ${{ variable.name }} value: ${{ variable.value }} - + # handle variable groups - ${{ if ne(variable.group, '') }}: - group: ${{ variable.group }} + # handle template variable syntax + # example: + # - template: path/to/template.yml + # parameters: + # [key]: [value] + - ${{ if ne(variable.template, '') }}: + - template: ${{ variable.template }} + ${{ if ne(variable.parameters, '') }}: + parameters: ${{ variable.parameters }} + # handle key-value variable syntax. 
# example: # - [key]: [value] - - ${{ if and(eq(variable.name, ''), eq(variable.group, '')) }}: + - ${{ if and(eq(variable.name, ''), eq(variable.group, ''), eq(variable.template, '')) }}: - ${{ each pair in variable }}: - name: ${{ pair.key }} value: ${{ pair.value }} @@ -125,22 +122,13 @@ jobs: workspace: ${{ parameters.workspace }} steps: - - ${{ if eq(parameters.enableTelemetry, 'true') }}: - # Telemetry tasks are built from https://github.com/dotnet/arcade-extensions - - task: sendStartTelemetry@0 - displayName: 'Send Helix Start Telemetry' - inputs: - helixRepo: ${{ parameters.helixRepo }} - ${{ if ne(parameters.helixType, '') }}: - helixType: ${{ parameters.helixType }} - buildConfig: $(_BuildConfig) - runAsPublic: ${{ parameters.runAsPublic }} - continueOnError: ${{ parameters.continueOnError }} - condition: always() + - ${{ if ne(parameters.preSteps, '') }}: + - ${{ each preStep in parameters.preSteps }}: + - ${{ preStep }} - - ${{ if eq(parameters.enableMicrobuild, 'true') }}: - - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - task: MicroBuildSigningPlugin@2 + - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - ${{ if eq(parameters.enableMicrobuild, 'true') }}: + - task: MicroBuildSigningPlugin@3 displayName: Install MicroBuild plugin inputs: signType: $(_SignType) @@ -151,62 +139,121 @@ jobs: continueOnError: ${{ parameters.continueOnError }} condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT')) - - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - task: NuGetAuthenticate@0 + - ${{ if and(eq(parameters.runAsPublic, 'false'), eq(variables['System.TeamProject'], 'internal')) }}: + - task: NuGetAuthenticate@1 + + - ${{ if and(ne(parameters.artifacts.download, 'false'), ne(parameters.artifacts.download, '')) }}: + - task: DownloadPipelineArtifact@2 + inputs: + buildType: current + artifactName: ${{ coalesce(parameters.artifacts.download.name, 'Artifacts_$(Agent.OS)_$(_BuildConfig)') }} + targetPath: ${{ coalesce(parameters.artifacts.download.path, 'artifacts') }} + itemPattern: ${{ coalesce(parameters.artifacts.download.pattern, '**') }} - ${{ each step in parameters.steps }}: - ${{ step }} + - ${{ if eq(parameters.enableRichCodeNavigation, true) }}: + - task: RichCodeNavIndexer@0 + displayName: RichCodeNav Upload + inputs: + languages: ${{ coalesce(parameters.richCodeNavigationLanguage, 'csharp') }} + environment: ${{ coalesce(parameters.richCodeNavigationEnvironment, 'production') }} + richNavLogOutputDirectory: $(Build.SourcesDirectory)/artifacts/bin + uploadRichNavArtifacts: ${{ coalesce(parameters.richCodeNavigationUploadArtifacts, false) }} + continueOnError: true + + - template: /eng/common/templates/steps/component-governance.yml + parameters: + ${{ if eq(parameters.disableComponentGovernance, '') }}: + ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.runAsPublic, 'false'), or(startsWith(variables['Build.SourceBranch'], 'refs/heads/release/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/dotnet/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/microsoft/'), eq(variables['Build.SourceBranch'], 'refs/heads/main'))) }}: + 
disableComponentGovernance: false + ${{ else }}: + disableComponentGovernance: true + ${{ else }}: + disableComponentGovernance: ${{ parameters.disableComponentGovernance }} + componentGovernanceIgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }} + - ${{ if eq(parameters.enableMicrobuild, 'true') }}: - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - task: MicroBuildCleanup@1 - displayName: Execute Microbuild cleanup tasks + displayName: Execute Microbuild cleanup tasks condition: and(always(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT')) continueOnError: ${{ parameters.continueOnError }} env: TeamName: $(_TeamName) - - ${{ if eq(parameters.enableTelemetry, 'true') }}: - # Telemetry tasks are built from https://github.com/dotnet/arcade-extensions - - task: sendEndTelemetry@0 - displayName: 'Send Helix End Telemetry' - continueOnError: ${{ parameters.continueOnError }} - condition: always() - - - ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}: + - ${{ if ne(parameters.artifacts.publish, '') }}: + - ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}: + - task: CopyFiles@2 + displayName: Gather binaries for publish to artifacts + inputs: + SourceFolder: 'artifacts/bin' + Contents: '**' + TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/bin' + - task: CopyFiles@2 + displayName: Gather packages for publish to artifacts + inputs: + SourceFolder: 'artifacts/packages' + Contents: '**' + TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/packages' + - task: PublishBuildArtifacts@1 + displayName: Publish pipeline artifacts + inputs: + PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts' + PublishLocation: Container + ArtifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }} + continueOnError: true + condition: always() + - ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}: + - publish: artifacts/log + artifact: ${{ coalesce(parameters.artifacts.publish.logs.name, 'Logs_Build_$(Agent.Os)_$(_BuildConfig)') }} + displayName: Publish logs + continueOnError: true + condition: always() + + - ${{ if ne(parameters.enablePublishBuildArtifacts, 'false') }}: - task: PublishBuildArtifacts@1 displayName: Publish Logs inputs: PathtoPublish: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)' PublishLocation: Container - ArtifactName: $(Agent.Os)_$(Agent.JobName) + ArtifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)' ) }} continueOnError: true condition: always() - - ${{ if eq(parameters.enablePublishTestResults, 'true') }}: + - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'xunit')) }}: - task: PublishTestResults@2 - displayName: Publish Test Results + displayName: Publish XUnit Test Results inputs: testResultsFormat: 'xUnit' - testResultsFiles: '*.xml' + testResultsFiles: '*.xml' searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)' + testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-xunit + mergeTestResults: ${{ parameters.mergeTestResults }} continueOnError: true condition: always() - - - ${{ if 
and(eq(parameters.enablePublishBuildAssets, true), ne(parameters.enablePublishUsingPipelines, 'true'), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - task: CopyFiles@2 - displayName: Gather Asset Manifests + - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'vstest')) }}: + - task: PublishTestResults@2 + displayName: Publish TRX Test Results inputs: - SourceFolder: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)/AssetManifest' - TargetFolder: '$(Build.StagingDirectory)/AssetManifests' - continueOnError: ${{ parameters.continueOnError }} - condition: and(succeeded(), eq(variables['_DotNetPublishToBlobFeed'], 'true')) + testResultsFormat: 'VSTest' + testResultsFiles: '*.trx' + searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)' + testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-trx + mergeTestResults: ${{ parameters.mergeTestResults }} + continueOnError: true + condition: always() - - task: PublishBuildArtifacts@1 - displayName: Push Asset Manifests - inputs: - PathtoPublish: '$(Build.StagingDirectory)/AssetManifests' - PublishLocation: Container - ArtifactName: AssetManifests - continueOnError: ${{ parameters.continueOnError }} - condition: and(succeeded(), eq(variables['_DotNetPublishToBlobFeed'], 'true')) + - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.enableSbom, 'true')) }}: + - template: /eng/common/templates/steps/generate-sbom.yml + parameters: + PackageVersion: ${{ parameters.packageVersion}} + BuildDropPath: ${{ parameters.buildDropPath }} + IgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }} + + - ${{ if eq(parameters.enableBuildRetry, 'true') }}: + - publish: $(Build.SourcesDirectory)\eng\common\BuildConfiguration + artifact: BuildConfiguration + displayName: Publish build retry configuration + continueOnError: true diff --git a/eng/common/templates/job/onelocbuild.yml b/eng/common/templates/job/onelocbuild.yml new file mode 100644 index 000000000..60ab00c4d --- /dev/null +++ b/eng/common/templates/job/onelocbuild.yml @@ -0,0 +1,109 @@ +parameters: + # Optional: dependencies of the job + dependsOn: '' + + # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool + pool: '' + + CeapexPat: $(dn-bot-ceapex-package-r) # PAT for the loc AzDO instance https://dev.azure.com/ceapex + GithubPat: $(BotAccount-dotnet-bot-repo-PAT) + + SourcesDirectory: $(Build.SourcesDirectory) + CreatePr: true + AutoCompletePr: false + ReusePr: true + UseLfLineEndings: true + UseCheckedInLocProjectJson: false + SkipLocProjectJsonGeneration: false + LanguageSet: VS_Main_Languages + LclSource: lclFilesInRepo + LclPackageId: '' + RepoType: gitHub + GitHubOrg: dotnet + MirrorRepo: '' + MirrorBranch: main + condition: '' + JobNameSuffix: '' + +jobs: +- job: OneLocBuild${{ parameters.JobNameSuffix }} + + dependsOn: ${{ parameters.dependsOn }} + + displayName: OneLocBuild${{ parameters.JobNameSuffix }} + + variables: + - group: OneLocBuildVariables # Contains the CeapexPat and GithubPat + - name: _GenerateLocProjectArguments + value: -SourcesDirectory ${{ parameters.SourcesDirectory }} + -LanguageSet "${{ parameters.LanguageSet }}" + -CreateNeutralXlfs 
+ - ${{ if eq(parameters.UseCheckedInLocProjectJson, 'true') }}: + - name: _GenerateLocProjectArguments + value: ${{ variables._GenerateLocProjectArguments }} -UseCheckedInLocProjectJson + - template: /eng/common/templates/variables/pool-providers.yml + + ${{ if ne(parameters.pool, '') }}: + pool: ${{ parameters.pool }} + ${{ if eq(parameters.pool, '') }}: + pool: + # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com) + ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: + name: VSEngSS-MicroBuild2022-1ES + demands: Cmd + # If it's not devdiv, it's dnceng + ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}: + name: $(DncEngInternalBuildPool) + demands: ImageOverride -equals windows.vs2019.amd64 + + steps: + - ${{ if ne(parameters.SkipLocProjectJsonGeneration, 'true') }}: + - task: Powershell@2 + inputs: + filePath: $(Build.SourcesDirectory)/eng/common/generate-locproject.ps1 + arguments: $(_GenerateLocProjectArguments) + displayName: Generate LocProject.json + condition: ${{ parameters.condition }} + + - task: OneLocBuild@2 + displayName: OneLocBuild + env: + SYSTEM_ACCESSTOKEN: $(System.AccessToken) + inputs: + locProj: eng/Localize/LocProject.json + outDir: $(Build.ArtifactStagingDirectory) + lclSource: ${{ parameters.LclSource }} + lclPackageId: ${{ parameters.LclPackageId }} + isCreatePrSelected: ${{ parameters.CreatePr }} + isAutoCompletePrSelected: ${{ parameters.AutoCompletePr }} + ${{ if eq(parameters.CreatePr, true) }}: + isUseLfLineEndingsSelected: ${{ parameters.UseLfLineEndings }} + ${{ if eq(parameters.RepoType, 'gitHub') }}: + isShouldReusePrSelected: ${{ parameters.ReusePr }} + packageSourceAuth: patAuth + patVariable: ${{ parameters.CeapexPat }} + ${{ if eq(parameters.RepoType, 'gitHub') }}: + repoType: ${{ parameters.RepoType }} + gitHubPatVariable: "${{ parameters.GithubPat }}" + ${{ if ne(parameters.MirrorRepo, '') }}: + isMirrorRepoSelected: true + gitHubOrganization: ${{ parameters.GitHubOrg }} + mirrorRepo: ${{ parameters.MirrorRepo }} + mirrorBranch: ${{ parameters.MirrorBranch }} + condition: ${{ parameters.condition }} + + - task: PublishBuildArtifacts@1 + displayName: Publish Localization Files + inputs: + PathtoPublish: '$(Build.ArtifactStagingDirectory)/loc' + PublishLocation: Container + ArtifactName: Loc + condition: ${{ parameters.condition }} + + - task: PublishBuildArtifacts@1 + displayName: Publish LocProject.json + inputs: + PathtoPublish: '$(Build.SourcesDirectory)/eng/Localize/' + PublishLocation: Container + ArtifactName: Loc + condition: ${{ parameters.condition }} \ No newline at end of file diff --git a/eng/common/templates/job/performance.yml b/eng/common/templates/job/performance.yml deleted file mode 100644 index f877fd7a8..000000000 --- a/eng/common/templates/job/performance.yml +++ /dev/null @@ -1,95 +0,0 @@ -parameters: - steps: [] # optional -- any additional steps that need to happen before pulling down the performance repo and sending the performance benchmarks to helix (ie building your repo) - variables: [] # optional -- list of additional variables to send to the template - jobName: '' # required -- job name - displayName: '' # optional -- display name for the job. 
Will use jobName if not passed - pool: '' # required -- name of the Build pool - container: '' # required -- name of the container - osGroup: '' # required -- operating system for the job - extraSetupParameters: '' # optional -- extra arguments to pass to the setup script - frameworks: ['netcoreapp3.0'] # optional -- list of frameworks to run against - continueOnError: 'false' # optional -- determines whether to continue the build if the step errors - dependsOn: '' # optional -- dependencies of the job - timeoutInMinutes: 320 # optional -- timeout for the job - enableTelemetry: false # optional -- enable for telemetry - -jobs: -- template: ../jobs/jobs.yml - parameters: - dependsOn: ${{ parameters.dependsOn }} - enableTelemetry: ${{ parameters.enableTelemetry }} - enablePublishBuildArtifacts: true - continueOnError: ${{ parameters.continueOnError }} - - jobs: - - job: '${{ parameters.jobName }}' - - ${{ if ne(parameters.displayName, '') }}: - displayName: '${{ parameters.displayName }}' - ${{ if eq(parameters.displayName, '') }}: - displayName: '${{ parameters.jobName }}' - - timeoutInMinutes: ${{ parameters.timeoutInMinutes }} - - variables: - - - ${{ each variable in parameters.variables }}: - - ${{ if ne(variable.name, '') }}: - - name: ${{ variable.name }} - value: ${{ variable.value }} - - ${{ if ne(variable.group, '') }}: - - group: ${{ variable.group }} - - - IsInternal: '' - - HelixApiAccessToken: '' - - HelixPreCommand: '' - - - ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - ${{ if eq( parameters.osGroup, 'Windows_NT') }}: - - HelixPreCommand: 'set "PERFLAB_UPLOAD_TOKEN=$(PerfCommandUploadToken)"' - - IsInternal: -Internal - - ${{ if ne(parameters.osGroup, 'Windows_NT') }}: - - HelixPreCommand: 'export PERFLAB_UPLOAD_TOKEN="$(PerfCommandUploadTokenLinux)"' - - IsInternal: --internal - - - group: DotNet-HelixApi-Access - - group: dotnet-benchview - - workspace: - clean: all - pool: - ${{ parameters.pool }} - container: ${{ parameters.container }} - strategy: - matrix: - ${{ each framework in parameters.frameworks }}: - ${{ framework }}: - _Framework: ${{ framework }} - steps: - - checkout: self - clean: true - # Run all of the steps to setup repo - - ${{ each step in parameters.steps }}: - - ${{ step }} - - powershell: $(Build.SourcesDirectory)\eng\common\performance\performance-setup.ps1 $(IsInternal) -Framework $(_Framework) ${{ parameters.extraSetupParameters }} - displayName: Performance Setup (Windows) - condition: and(succeeded(), eq(variables['Agent.Os'], 'Windows_NT')) - continueOnError: ${{ parameters.continueOnError }} - - script: $(Build.SourcesDirectory)/eng/common/performance/performance-setup.sh $(IsInternal) --framework $(_Framework) ${{ parameters.extraSetupParameters }} - displayName: Performance Setup (Unix) - condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT')) - continueOnError: ${{ parameters.continueOnError }} - - script: $(Python) $(PerformanceDirectory)/scripts/ci_setup.py $(SetupArguments) - displayName: Run ci setup script - # Run perf testing in helix - - template: /eng/common/templates/steps/perf-send-to-helix.yml - parameters: - HelixSource: '$(HelixSourcePrefix)/$(Build.Repository.Name)/$(Build.SourceBranch)' # sources must start with pr/, official/, prodcon/, or agent/ - HelixType: 'test/performance/$(Kind)/$(_Framework)/$(Architecture)' - HelixAccessToken: $(HelixApiAccessToken) - HelixTargetQueues: $(Queue) - HelixPreCommands: $(HelixPreCommand) - Creator: $(Creator) 
- WorkItemTimeout: 4:00 # 4 hours
-        WorkItemDirectory: '$(WorkItemDirectory)' # WorkItemDirectory can not be empty, so we send it some docs to keep it happy
-        CorrelationPayloadDirectory: '$(PayloadDirectory)' # it gets checked out to a folder with shorter path than WorkItemDirectory so we can avoid file name too long exceptions
\ No newline at end of file
diff --git a/eng/common/templates/job/publish-build-assets.yml b/eng/common/templates/job/publish-build-assets.yml
index b722975f9..cc2b346ba 100644
--- a/eng/common/templates/job/publish-build-assets.yml
+++ b/eng/common/templates/job/publish-build-assets.yml
@@ -23,20 +23,43 @@ parameters:
   # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing
   publishUsingPipelines: false
 
+  # Optional: whether to publish the build's assets as soon as the publish to BAR stage completes, rather than in a separate stage
+  publishAssetsImmediately: false
+
+  artifactsPublishingAdditionalParameters: ''
+
+  signingValidationAdditionalParameters: ''
+
 jobs:
 - job: Asset_Registry_Publish
 
   dependsOn: ${{ parameters.dependsOn }}
+  timeoutInMinutes: 150
 
-  displayName: Publish to Build Asset Registry
-
-  pool: ${{ parameters.pool }}
+  ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
+    displayName: Publish Assets
+  ${{ else }}:
+    displayName: Publish to Build Asset Registry
 
   variables:
+  - template: /eng/common/templates/variables/pool-providers.yml
   - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
-    - name: _BuildConfig
-      value: ${{ parameters.configuration }}
     - group: Publish-Build-Assets
+    - group: AzureDevOps-Artifact-Feeds-Pats
+    - name: runCodesignValidationInjection
+      value: false
+    - ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
+      - template: /eng/common/templates/post-build/common-variables.yml
+
+  pool:
+    # We don't use the collection uri here because it might vary (.visualstudio.com vs.
dev.azure.com)
+    ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
+      name: VSEngSS-MicroBuild2022-1ES
+      demands: Cmd
+    # If it's not devdiv, it's dnceng
+    ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
+      name: NetCore1ESPool-Publishing-Internal
+      demands: ImageOverride -equals windows.vs2019.amd64
 
   steps:
   - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
@@ -45,25 +68,28 @@ jobs:
       inputs:
         artifactName: AssetManifests
         downloadPath: '$(Build.StagingDirectory)/Download'
+        checkDownloadedFiles: true
       condition: ${{ parameters.condition }}
       continueOnError: ${{ parameters.continueOnError }}
-
-  - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
-    - task: NuGetAuthenticate@0
 
-    - task: PowerShell@2
+    - task: NuGetAuthenticate@1
+
+    - task: AzureCLI@2
       displayName: Publish Build Assets
       inputs:
-        filePath: eng\common\sdk-task.ps1
-        arguments: -task PublishBuildAssets -restore -msbuildEngine dotnet
+        azureSubscription: "Darc: Maestro Production"
+        scriptType: ps
+        scriptLocation: scriptPath
+        scriptPath: $(Build.SourcesDirectory)/eng/common/sdk-task.ps1
+        arguments: >
+          -task PublishBuildAssets -restore -msbuildEngine dotnet
           /p:ManifestsPath='$(Build.StagingDirectory)/Download/AssetManifests'
-          /p:BuildAssetRegistryToken=$(MaestroAccessToken)
-          /p:MaestroApiEndpoint=https://maestro-prod.westus2.cloudapp.azure.com
+          /p:MaestroApiEndpoint=https://maestro.dot.net
           /p:PublishUsingPipelines=${{ parameters.publishUsingPipelines }}
-          /p:Configuration=$(_BuildConfig)
+          /p:OfficialBuildId=$(Build.BuildNumber)
       condition: ${{ parameters.condition }}
       continueOnError: ${{ parameters.continueOnError }}
-
+
   - task: powershell@2
     displayName: Create ReleaseConfigs Artifact
     inputs:
@@ -72,20 +98,59 @@ jobs:
         Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value $(BARBuildId)
         Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value "$(DefaultChannels)"
         Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value $(IsStableBuild)
-
+
   - task: PublishBuildArtifacts@1
     displayName: Publish ReleaseConfigs Artifact
     inputs:
       PathtoPublish: '$(Build.StagingDirectory)/ReleaseConfigs.txt'
       PublishLocation: Container
       ArtifactName: ReleaseConfigs
-
-  - ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}:
-    - task: PublishBuildArtifacts@1
-      displayName: Publish Logs to VSTS
+
+  - task: powershell@2
+    displayName: Check if SymbolPublishingExclusionsFile.txt exists
+    inputs:
+      targetType: inline
+      script: |
+        $symbolExclusionfile = "$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt"
+        if(Test-Path -Path $symbolExclusionfile)
+        {
+          Write-Host "SymbolExclusionFile exists"
+          Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]true"
+        }
+        else{
+          Write-Host "Symbols Exclusion file does not exist"
+          Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]false"
+        }
+
+  - task: PublishBuildArtifacts@1
+    displayName: Publish SymbolPublishingExclusionsFile Artifact
+    condition: eq(variables['SymbolExclusionFile'], 'true')
+    inputs:
+      PathtoPublish: '$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt'
+      PublishLocation: Container
+      ArtifactName: ReleaseConfigs
+
+  - ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
+    - template: /eng/common/templates/post-build/setup-maestro-vars.yml
+      parameters:
+        BARBuildId: ${{
parameters.BARBuildId }} + PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} + + - task: AzureCLI@2 + displayName: Publish Using Darc inputs: - PathtoPublish: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)' - PublishLocation: Container - ArtifactName: $(Agent.Os)_PublishBuildAssets - continueOnError: true - condition: always() + azureSubscription: "Darc: Maestro Production" + scriptType: ps + scriptLocation: scriptPath + scriptPath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1 + arguments: -BuildId $(BARBuildId) + -PublishingInfraVersion 3 + -AzdoToken '$(System.AccessToken)' + -WaitPublishingFinish true + -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}' + -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}' + + - ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}: + - template: /eng/common/templates/steps/publish-logs.yml + parameters: + JobLabel: 'Publish_Artifacts_Logs' diff --git a/eng/common/templates/job/source-build.yml b/eng/common/templates/job/source-build.yml new file mode 100644 index 000000000..c0ff472b6 --- /dev/null +++ b/eng/common/templates/job/source-build.yml @@ -0,0 +1,74 @@ +parameters: + # This template adds arcade-powered source-build to CI. The template produces a server job with a + # default ID 'Source_Build_Complete' to put in a dependency list if necessary. + + # Specifies the prefix for source-build jobs added to pipeline. Use this if disambiguation needed. + jobNamePrefix: 'Source_Build' + + # Defines the platform on which to run the job. By default, a linux-x64 machine, suitable for + # managed-only repositories. This is an object with these properties: + # + # name: '' + # The name of the job. This is included in the job ID. + # targetRID: '' + # The name of the target RID to use, instead of the one auto-detected by Arcade. + # nonPortable: false + # Enables non-portable mode. This means a more specific RID (e.g. fedora.32-x64 rather than + # linux-x64), and compiling against distro-provided packages rather than portable ones. + # skipPublishValidation: false + # Disables publishing validation. By default, a check is performed to ensure no packages are + # published by source-build. + # container: '' + # A container to use. Runs in docker. + # pool: {} + # A pool to use. Runs directly on an agent. + # buildScript: '' + # Specifies the build script to invoke to perform the build in the repo. The default + # './build.sh' should work for typical Arcade repositories, but this is customizable for + # difficult situations. + # jobProperties: {} + # A list of job properties to inject at the top level, for potential extensibility beyond + # container and pool. + platform: {} + + # If set to true and running on a non-public project, + # Internal blob storage locations will be enabled. + # This is not enabled by default because many repositories do not need internal sources + # and do not need to have the required service connections approved in the pipeline. + enableInternalSources: false + +jobs: +- job: ${{ parameters.jobNamePrefix }}_${{ parameters.platform.name }} + displayName: Source-Build (${{ parameters.platform.name }}) + + ${{ each property in parameters.platform.jobProperties }}: + ${{ property.key }}: ${{ property.value }} + + ${{ if ne(parameters.platform.container, '') }}: + container: ${{ parameters.platform.container }} + + ${{ if eq(parameters.platform.pool, '') }}: + # The default VM host AzDO pool. 
This should be capable of running Docker containers: almost all + # source-build builds run in Docker, including the default managed platform. + # /eng/common/templates/variables/pool-providers.yml can't be used here (some customers declare variables already), so duplicate its logic + pool: + ${{ if eq(variables['System.TeamProject'], 'public') }}: + name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore-Svc-Public' ), False, 'NetCore-Public')] + demands: ImageOverride -equals Build.Ubuntu.1804.Amd64.Open + + ${{ if eq(variables['System.TeamProject'], 'internal') }}: + name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore1ESPool-Svc-Internal'), False, 'NetCore1ESPool-Internal')] + demands: ImageOverride -equals Build.Ubuntu.1804.Amd64 + + ${{ if ne(parameters.platform.pool, '') }}: + pool: ${{ parameters.platform.pool }} + + workspace: + clean: all + + steps: + - ${{ if eq(parameters.enableInternalSources, true) }}: + - template: /eng/common/templates/steps/enable-internal-runtimes.yml + - template: /eng/common/templates/steps/source-build.yml + parameters: + platform: ${{ parameters.platform }} diff --git a/eng/common/templates/job/source-index-stage1.yml b/eng/common/templates/job/source-index-stage1.yml new file mode 100644 index 000000000..0b6bb89dc --- /dev/null +++ b/eng/common/templates/job/source-index-stage1.yml @@ -0,0 +1,82 @@ +parameters: + runAsPublic: false + sourceIndexUploadPackageVersion: 2.0.0-20240502.12 + sourceIndexProcessBinlogPackageVersion: 1.0.1-20240129.2 + sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json + sourceIndexBuildCommand: powershell -NoLogo -NoProfile -ExecutionPolicy Bypass -Command "eng/common/build.ps1 -restore -build -binarylog -ci" + preSteps: [] + binlogPath: artifacts/log/Debug/Build.binlog + condition: '' + dependsOn: '' + pool: '' + +jobs: +- job: SourceIndexStage1 + dependsOn: ${{ parameters.dependsOn }} + condition: ${{ parameters.condition }} + variables: + - name: SourceIndexUploadPackageVersion + value: ${{ parameters.sourceIndexUploadPackageVersion }} + - name: SourceIndexProcessBinlogPackageVersion + value: ${{ parameters.sourceIndexProcessBinlogPackageVersion }} + - name: SourceIndexPackageSource + value: ${{ parameters.sourceIndexPackageSource }} + - name: BinlogPath + value: ${{ parameters.binlogPath }} + - template: /eng/common/templates/variables/pool-providers.yml + + ${{ if ne(parameters.pool, '') }}: + pool: ${{ parameters.pool }} + ${{ if eq(parameters.pool, '') }}: + pool: + ${{ if eq(variables['System.TeamProject'], 'public') }}: + name: $(DncEngPublicBuildPool) + demands: ImageOverride -equals windows.vs2019.amd64.open + ${{ if eq(variables['System.TeamProject'], 'internal') }}: + name: $(DncEngInternalBuildPool) + demands: ImageOverride -equals windows.vs2019.amd64 + + steps: + - ${{ each preStep in parameters.preSteps }}: + - ${{ preStep }} + + - task: UseDotNet@2 + displayName: Use .NET 8 SDK + inputs: + packageType: sdk + version: 8.0.x + installationPath: $(Agent.TempDirectory)/dotnet + workingDirectory: $(Agent.TempDirectory) + + - script: | + $(Agent.TempDirectory)/dotnet/dotnet tool install BinLogToSln --version $(sourceIndexProcessBinlogPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path 
$(Agent.TempDirectory)/.source-index/tools
+      $(Agent.TempDirectory)/dotnet/dotnet tool install UploadIndexStage1 --version $(sourceIndexUploadPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools
+    displayName: Download Tools
+    # Set the working directory to the temp directory so 'dotnet' doesn't pick up the repo's global.json and SDK.
+    workingDirectory: $(Agent.TempDirectory)
+
+  - script: ${{ parameters.sourceIndexBuildCommand }}
+    displayName: Build Repository
+
+  - script: $(Agent.TempDirectory)/.source-index/tools/BinLogToSln -i $(BinlogPath) -r $(Build.SourcesDirectory) -n $(Build.Repository.Name) -o .source-index/stage1output
+    displayName: Process Binlog into indexable sln
+
+  - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+    - task: AzureCLI@2
+      displayName: Get stage 1 auth token
+      inputs:
+        azureSubscription: 'SourceDotNet Stage1 Publish'
+        addSpnToEnvironment: true
+        scriptType: 'ps'
+        scriptLocation: 'inlineScript'
+        inlineScript: |
+          echo "##vso[task.setvariable variable=ARM_CLIENT_ID;issecret=true]$env:servicePrincipalId"
+          echo "##vso[task.setvariable variable=ARM_ID_TOKEN;issecret=true]$env:idToken"
+          echo "##vso[task.setvariable variable=ARM_TENANT_ID;issecret=true]$env:tenantId"
+
+    - script: |
+        az login --service-principal -u $(ARM_CLIENT_ID) --tenant $(ARM_TENANT_ID) --allow-no-subscriptions --federated-token $(ARM_ID_TOKEN)
+      displayName: "Login to Azure"
+
+    - script: $(Agent.TempDirectory)/.source-index/tools/UploadIndexStage1 -i .source-index/stage1output -n $(Build.Repository.Name) -s netsourceindexstage1 -b stage1
+      displayName: Upload stage1 artifacts to source index
diff --git a/eng/common/templates/jobs/codeql-build.yml b/eng/common/templates/jobs/codeql-build.yml
new file mode 100644
index 000000000..f7dc5ea4a
--- /dev/null
+++ b/eng/common/templates/jobs/codeql-build.yml
@@ -0,0 +1,31 @@
+parameters:
+  # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md
+  continueOnError: false
+  # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
+  jobs: []
+  # Optional: if specified, restore and use this version of Guardian instead of the default.
+  overrideGuardianVersion: ''
+
+jobs:
+- template: /eng/common/templates/jobs/jobs.yml
+  parameters:
+    enableMicrobuild: false
+    enablePublishBuildArtifacts: false
+    enablePublishTestResults: false
+    enablePublishBuildAssets: false
+    enablePublishUsingPipelines: false
+    enableTelemetry: true
+
+    variables:
+      - group: Publish-Build-Assets
+      # The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in
+      # sync with the packages.config file.
+      - name: DefaultGuardianVersion
+        value: 0.109.0
+      - name: GuardianPackagesConfigFile
+        value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config
+      - name: GuardianVersion
+        value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }}
+
+    jobs: ${{ parameters.jobs }}
+
diff --git a/eng/common/templates/jobs/jobs.yml b/eng/common/templates/jobs/jobs.yml
index 6a2f98c03..289bb2396 100644
--- a/eng/common/templates/jobs/jobs.yml
+++ b/eng/common/templates/jobs/jobs.yml
@@ -1,48 +1,46 @@
 parameters:
-  # Optional: 'true' if failures in job.yml job should not fail the job
+  # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md
   continueOnError: false
 
-  # Optional: Enable installing Microbuild plugin
-  #           if 'true', these "variables" must be specified in the variables object or as part of the queue matrix
-  #             _TeamName - the name of your team
-  #             _SignType - 'test' or 'real'
-  enableMicrobuild: false
-
   # Optional: Include PublishBuildArtifacts task
   enablePublishBuildArtifacts: false
 
-  # Optional: Enable publishing to the build asset registry
-  enablePublishBuildAssets: false
-
   # Optional: Enable publishing using release pipelines
   enablePublishUsingPipelines: false
-
+
+  # Optional: Enable running the source-build jobs to build repo from source
+  enableSourceBuild: false
+
+  # Optional: Parameters for source-build template.
+  #           See /eng/common/templates/jobs/source-build.yml for options
+  sourceBuildParameters: []
+
   graphFileGeneration:
     # Optional: Enable generating the graph files at the end of the build
     enabled: false
     # Optional: Include toolset dependencies in the generated graph files
     includeToolset: false
 
-  # Optional: Include PublishTestResults task
-  enablePublishTestResults: false
-
-  # Optional: enable sending telemetry
-  #           if enabled then the 'helixRepo' parameter should also be specified
-  enableTelemetry: false
-
   # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
   jobs: []
 
-  # Optional: define the helix repo for telemetry (example: 'dotnet/arcade')
-  helixRepo: ''
-
   # Optional: Override automatically derived dependsOn value for "publish build assets" job
   publishBuildAssetsDependsOn: ''
 
+  # Optional: Publish the assets as soon as the publish to BAR stage is complete, rather than doing so in a separate stage.
+  publishAssetsImmediately: false
+
+  # Optional: additional parameters to pass along when publishAssetsImmediately is used (these are normally sent to post-build.yml)
+  artifactsPublishingAdditionalParameters: ''
+  signingValidationAdditionalParameters: ''
+
   # Optional: should run as a public build even in the internal project
   # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
   runAsPublic: false
 
+  enableSourceIndex: false
+  sourceIndexParams: {}
+
 # Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
 # and some (Microbuild) should only be applied to non-PR cases for internal builds.
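(For review context: an illustrative consumer sketch, not part of this patch. A repo pipeline opts into the switches added above by passing them to jobs.yml alongside its own job list; the job name, pool image, and build script below are hypothetical placeholders.)

  jobs:
  - template: /eng/common/templates/jobs/jobs.yml
    parameters:
      enablePublishBuildArtifacts: true
      enablePublishUsingPipelines: true
      enableSourceBuild: true         # adds the Source_Build_* jobs
      enableSourceIndex: true         # adds the SourceIndexStage1 job
      publishAssetsImmediately: true  # publish assets as soon as the BAR publish completes
      jobs:
      - job: Windows_NT               # hypothetical repo build job
        pool:
          vmImage: windows-latest
        steps:
        - script: eng\common\cibuild.cmd -configuration $(_BuildConfig)  # placeholder build entry point
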
@@ -62,29 +60,38 @@ jobs: name: ${{ job.job }} -- ${{ if and(eq(parameters.enablePublishBuildAssets, true), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - template: ../job/publish-build-assets.yml +- ${{ if eq(parameters.enableSourceBuild, true) }}: + - template: /eng/common/templates/jobs/source-build.yml parameters: - continueOnError: ${{ parameters.continueOnError }} - dependsOn: - - ${{ if ne(parameters.publishBuildAssetsDependsOn, '') }}: - - ${{ each job in parameters.publishBuildAssetsDependsOn }}: - - ${{ job.job }} - - ${{ if eq(parameters.publishBuildAssetsDependsOn, '') }}: - - ${{ each job in parameters.jobs }}: - - ${{ job.job }} - pool: - vmImage: vs2017-win2016 - runAsPublic: ${{ parameters.runAsPublic }} - publishUsingPipelines: ${{ parameters.enablePublishUsingPipelines }} - enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }} - -- ${{ if and(eq(parameters.graphFileGeneration.enabled, true), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - template: ../job/generate-graph-files.yml + allCompletedJobId: Source_Build_Complete + ${{ each parameter in parameters.sourceBuildParameters }}: + ${{ parameter.key }}: ${{ parameter.value }} + +- ${{ if eq(parameters.enableSourceIndex, 'true') }}: + - template: ../job/source-index-stage1.yml parameters: - continueOnError: ${{ parameters.continueOnError }} - includeToolset: ${{ parameters.graphFileGeneration.includeToolset }} - dependsOn: - - Asset_Registry_Publish - pool: - vmImage: vs2017-win2016 + runAsPublic: ${{ parameters.runAsPublic }} + ${{ each parameter in parameters.sourceIndexParams }}: + ${{ parameter.key }}: ${{ parameter.value }} + +- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - ${{ if or(eq(parameters.enablePublishBuildAssets, true), eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, '')) }}: + - template: ../job/publish-build-assets.yml + parameters: + continueOnError: ${{ parameters.continueOnError }} + dependsOn: + - ${{ if ne(parameters.publishBuildAssetsDependsOn, '') }}: + - ${{ each job in parameters.publishBuildAssetsDependsOn }}: + - ${{ job.job }} + - ${{ if eq(parameters.publishBuildAssetsDependsOn, '') }}: + - ${{ each job in parameters.jobs }}: + - ${{ job.job }} + - ${{ if eq(parameters.enableSourceBuild, true) }}: + - Source_Build_Complete + + runAsPublic: ${{ parameters.runAsPublic }} + publishUsingPipelines: ${{ parameters.enablePublishUsingPipelines }} + publishAssetsImmediately: ${{ parameters.publishAssetsImmediately }} + enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }} + artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} + signingValidationAdditionalParameters: ${{ parameters.signingValidationAdditionalParameters }} diff --git a/eng/common/templates/jobs/source-build.yml b/eng/common/templates/jobs/source-build.yml new file mode 100644 index 000000000..5f46bfa89 --- /dev/null +++ b/eng/common/templates/jobs/source-build.yml @@ -0,0 +1,54 @@ +parameters: + # This template adds arcade-powered source-build to CI. A job is created for each platform, as + # well as an optional server job that completes when all platform jobs complete. 
+
+  # The name of the "join" job for all source-build platforms. If set to empty string, the job is
+  # not included. Existing repo pipelines can use this job to depend on all source-build jobs
+  # completing without maintaining a separate list of every single job ID: just depend on this one
+  # server job. By default, not included. Recommended name if used: 'Source_Build_Complete'.
+  allCompletedJobId: ''
+
+  # See /eng/common/templates/job/source-build.yml
+  jobNamePrefix: 'Source_Build'
+
+  # This is the default platform provided by Arcade, intended for use by a managed-only repo.
+  defaultManagedPlatform:
+    name: 'Managed'
+    container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8'
+
+  # Defines the platforms on which to run build jobs. One job is created for each platform, and the
+  # object in this array is sent to the job template as 'platform'. If no platforms are specified,
+  # one job runs on 'defaultManagedPlatform'.
+  platforms: []
+
+  # If set to true and running on a non-public project,
+  # internal NuGet and blob storage locations will be enabled.
+  # This is not enabled by default because many repositories do not need internal sources
+  # and do not need to have the required service connections approved in the pipeline.
+  enableInternalSources: false
+
+jobs:
+
+- ${{ if ne(parameters.allCompletedJobId, '') }}:
+  - job: ${{ parameters.allCompletedJobId }}
+    displayName: Source-Build Complete
+    pool: server
+    dependsOn:
+    - ${{ each platform in parameters.platforms }}:
+      - ${{ parameters.jobNamePrefix }}_${{ platform.name }}
+    - ${{ if eq(length(parameters.platforms), 0) }}:
+      - ${{ parameters.jobNamePrefix }}_${{ parameters.defaultManagedPlatform.name }}
+
+- ${{ each platform in parameters.platforms }}:
+  - template: /eng/common/templates/job/source-build.yml
+    parameters:
+      jobNamePrefix: ${{ parameters.jobNamePrefix }}
+      platform: ${{ platform }}
+      enableInternalSources: ${{ parameters.enableInternalSources }}
+
+- ${{ if eq(length(parameters.platforms), 0) }}:
+  - template: /eng/common/templates/job/source-build.yml
+    parameters:
+      jobNamePrefix: ${{ parameters.jobNamePrefix }}
+      platform: ${{ parameters.defaultManagedPlatform }}
+      enableInternalSources: ${{ parameters.enableInternalSources }}
diff --git a/eng/common/templates/phases/base.yml b/eng/common/templates/phases/base.yml
deleted file mode 100644
index 0123cf43b..000000000
--- a/eng/common/templates/phases/base.yml
+++ /dev/null
@@ -1,130 +0,0 @@
-parameters:
-  # Optional: Clean sources before building
-  clean: true
-
-  # Optional: Git fetch depth
-  fetchDepth: ''
-
-  # Optional: name of the phase (not specifying phase name may cause name collisions)
-  name: ''
-  # Optional: display name of the phase
-  displayName: ''
-
-  # Optional: condition for the job to run
-  condition: ''
-
-  # Optional: dependencies of the phase
-  dependsOn: ''
-
-  # Required: A defined YAML queue
-  queue: {}
-
-  # Required: build steps
-  steps: []
-
-  # Optional: variables
-  variables: {}
-
-  # Optional: should run as a public build even in the internal project
-  # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
- runAsPublic: false - - ## Telemetry variables - - # Optional: enable sending telemetry - # if 'true', these "variables" must be specified in the variables object or as part of the queue matrix - # _HelixBuildConfig - differentiate between Debug, Release, other - # _HelixSource - Example: build/product - # _HelixType - Example: official/dotnet/arcade/$(Build.SourceBranch) - enableTelemetry: false - - # Optional: Enable installing Microbuild plugin - # if 'true', these "variables" must be specified in the variables object or as part of the queue matrix - # _TeamName - the name of your team - # _SignType - 'test' or 'real' - enableMicrobuild: false - -# Internal resources (telemetry, microbuild) can only be accessed from non-public projects, -# and some (Microbuild) should only be applied to non-PR cases for internal builds. - -phases: -- phase: ${{ parameters.name }} - - ${{ if ne(parameters.displayName, '') }}: - displayName: ${{ parameters.displayName }} - - ${{ if ne(parameters.condition, '') }}: - condition: ${{ parameters.condition }} - - ${{ if ne(parameters.dependsOn, '') }}: - dependsOn: ${{ parameters.dependsOn }} - - queue: ${{ parameters.queue }} - - ${{ if ne(parameters.variables, '') }}: - variables: - ${{ insert }}: ${{ parameters.variables }} - - steps: - - checkout: self - clean: ${{ parameters.clean }} - ${{ if ne(parameters.fetchDepth, '') }}: - fetchDepth: ${{ parameters.fetchDepth }} - - - ${{ if eq(parameters.enableTelemetry, 'true') }}: - - template: /eng/common/templates/steps/telemetry-start.yml - parameters: - buildConfig: $(_HelixBuildConfig) - helixSource: $(_HelixSource) - helixType: $(_HelixType) - runAsPublic: ${{ parameters.runAsPublic }} - - - ${{ if eq(parameters.enableMicrobuild, 'true') }}: - # Internal only resource, and Microbuild signing shouldn't be applied to PRs. 
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - task: MicroBuildSigningPlugin@2 - displayName: Install MicroBuild plugin - inputs: - signType: $(_SignType) - zipSources: false - feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json - - env: - TeamName: $(_TeamName) - continueOnError: false - condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT')) - - # Run provided build steps - - ${{ parameters.steps }} - - - ${{ if eq(parameters.enableMicrobuild, 'true') }}: - # Internal only resources - - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - task: MicroBuildCleanup@1 - displayName: Execute Microbuild cleanup tasks - condition: and(always(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT')) - env: - TeamName: $(_TeamName) - - - ${{ if eq(parameters.enableTelemetry, 'true') }}: - - template: /eng/common/templates/steps/telemetry-end.yml - parameters: - helixSource: $(_HelixSource) - helixType: $(_HelixType) - - - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - task: CopyFiles@2 - displayName: Gather Asset Manifests - inputs: - SourceFolder: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)/AssetManifest' - TargetFolder: '$(Build.StagingDirectory)/AssetManifests' - continueOnError: false - condition: and(succeeded(), eq(variables['_DotNetPublishToBlobFeed'], 'true')) - - task: PublishBuildArtifacts@1 - displayName: Push Asset Manifests - inputs: - PathtoPublish: '$(Build.StagingDirectory)/AssetManifests' - PublishLocation: Container - ArtifactName: AssetManifests - continueOnError: false - condition: and(succeeded(), eq(variables['_DotNetPublishToBlobFeed'], 'true')) diff --git a/eng/common/templates/phases/publish-build-assets.yml b/eng/common/templates/phases/publish-build-assets.yml deleted file mode 100644 index a0a807428..000000000 --- a/eng/common/templates/phases/publish-build-assets.yml +++ /dev/null @@ -1,51 +0,0 @@ -parameters: - dependsOn: '' - queue: {} - configuration: 'Debug' - condition: succeeded() - continueOnError: false - runAsPublic: false - publishUsingPipelines: false -phases: - - phase: Asset_Registry_Publish - displayName: Publish to Build Asset Registry - dependsOn: ${{ parameters.dependsOn }} - queue: ${{ parameters.queue }} - variables: - _BuildConfig: ${{ parameters.configuration }} - steps: - - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - task: DownloadBuildArtifacts@0 - displayName: Download artifact - inputs: - artifactName: AssetManifests - downloadPath: '$(Build.StagingDirectory)/Download' - condition: ${{ parameters.condition }} - continueOnError: ${{ parameters.continueOnError }} - - task: AzureKeyVault@1 - inputs: - azureSubscription: 'DotNet-Engineering-Services_KeyVault' - KeyVaultName: EngKeyVault - SecretsFilter: 'MaestroAccessToken' - condition: ${{ parameters.condition }} - continueOnError: ${{ parameters.continueOnError }} - - task: PowerShell@2 - displayName: Publish Build Assets - inputs: - filePath: eng\common\sdk-task.ps1 - arguments: -task PublishBuildAssets -restore -msbuildEngine dotnet - 
/p:ManifestsPath='$(Build.StagingDirectory)/Download/AssetManifests' - /p:BuildAssetRegistryToken=$(MaestroAccessToken) - /p:MaestroApiEndpoint=https://maestro-prod.westus2.cloudapp.azure.com - /p:PublishUsingPipelines=${{ parameters.publishUsingPipelines }} - /p:Configuration=$(_BuildConfig) - condition: ${{ parameters.condition }} - continueOnError: ${{ parameters.continueOnError }} - - task: PublishBuildArtifacts@1 - displayName: Publish Logs to VSTS - inputs: - PathtoPublish: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)' - PublishLocation: Container - ArtifactName: $(Agent.Os)_Asset_Registry_Publish - continueOnError: true - condition: always() diff --git a/eng/common/templates/post-build/channels/generic-internal-channel.yml b/eng/common/templates/post-build/channels/generic-internal-channel.yml deleted file mode 100644 index ad9375f5e..000000000 --- a/eng/common/templates/post-build/channels/generic-internal-channel.yml +++ /dev/null @@ -1,139 +0,0 @@ -parameters: - publishInstallersAndChecksums: false - symbolPublishingAdditionalParameters: '' - stageName: '' - channelName: '' - channelId: '' - transportFeed: '' - shippingFeed: '' - symbolsFeed: '' - -stages: -- stage: ${{ parameters.stageName }} - dependsOn: validate - variables: - - template: ../common-variables.yml - displayName: ${{ parameters.channelName }} Publishing - jobs: - - template: ../setup-maestro-vars.yml - - - job: - displayName: Symbol Publishing - dependsOn: setupMaestroVars - condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.InitialChannels'], format('[{0}]', ${{ parameters.channelId }} )) - variables: - - group: DotNet-Symbol-Server-Pats - pool: - vmImage: 'windows-2019' - steps: - # This is necessary whenever we want to publish/restore to an AzDO private feed - - task: NuGetAuthenticate@0 - displayName: 'Authenticate to AzDO Feeds' - - - task: DownloadBuildArtifacts@0 - displayName: Download Blob Artifacts - inputs: - artifactName: 'BlobArtifacts' - continueOnError: true - - - task: DownloadBuildArtifacts@0 - displayName: Download PDB Artifacts - inputs: - artifactName: 'PDBArtifacts' - continueOnError: true - - - task: PowerShell@2 - displayName: Publish - inputs: - filePath: eng\common\sdk-task.ps1 - arguments: -task PublishToSymbolServers -restore -msbuildEngine dotnet - /p:DotNetSymbolServerTokenMsdl=$(microsoft-symbol-server-pat) - /p:DotNetSymbolServerTokenSymWeb=$(symweb-symbol-server-pat) - /p:PDBArtifactsDirectory='$(Build.ArtifactStagingDirectory)/PDBArtifacts/' - /p:BlobBasePath='$(Build.ArtifactStagingDirectory)/BlobArtifacts/' - /p:SymbolPublishingExclusionsFile='$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt' - /p:Configuration=Release - ${{ parameters.symbolPublishingAdditionalParameters }} - - - job: publish_assets - displayName: Publish Assets - dependsOn: setupMaestroVars - variables: - - group: DotNet-Blob-Feed - - group: AzureDevOps-Artifact-Feeds-Pats - - name: BARBuildId - value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.BARBuildId'] ] - - name: IsStableBuild - value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.IsStableBuild'] ] - condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.InitialChannels'], format('[{0}]', ${{ parameters.channelId }})) - pool: - vmImage: 'windows-2019' - steps: - - task: DownloadBuildArtifacts@0 - displayName: Download Package Artifacts - inputs: - buildType: current - artifactName: PackageArtifacts - - - task: DownloadBuildArtifacts@0 - displayName: Download 
Blob Artifacts - inputs: - buildType: current - artifactName: BlobArtifacts - - - task: DownloadBuildArtifacts@0 - displayName: Download Asset Manifests - inputs: - buildType: current - artifactName: AssetManifests - - - task: NuGetToolInstaller@1 - displayName: 'Install NuGet.exe' - - # This is necessary whenever we want to publish/restore to an AzDO private feed - - task: NuGetAuthenticate@0 - displayName: 'Authenticate to AzDO Feeds' - - - task: PowerShell@2 - displayName: Enable cross-org publishing - inputs: - filePath: eng\common\enable-cross-org-publishing.ps1 - arguments: -token $(dn-bot-dnceng-artifact-feeds-rw) - - - task: PowerShell@2 - displayName: Publish Assets - inputs: - filePath: eng\common\sdk-task.ps1 - arguments: -task PublishArtifactsInManifest -restore -msbuildEngine dotnet - /p:IsStableBuild=$(IsStableBuild) - /p:IsInternalBuild=$(IsInternalBuild) - /p:RepositoryName=$(Build.Repository.Name) - /p:CommitSha=$(Build.SourceVersion) - /p:NugetPath=$(NuGetExeToolPath) - /p:AzdoTargetFeedPAT='$(dn-bot-dnceng-universal-packages-rw)' - /p:AzureStorageTargetFeedPAT='$(dotnetfeed-storage-access-key-1)' - /p:BARBuildId=$(BARBuildId) - /p:MaestroApiEndpoint='$(MaestroApiEndPoint)' - /p:BuildAssetRegistryToken='$(MaestroApiAccessToken)' - /p:ManifestsBasePath='$(Build.ArtifactStagingDirectory)/AssetManifests/' - /p:BlobBasePath='$(Build.ArtifactStagingDirectory)/BlobArtifacts/' - /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts/' - /p:Configuration=Release - /p:PublishInstallersAndChecksums=true - /p:ChecksumsTargetStaticFeed=$(InternalChecksumsBlobFeedUrl) - /p:ChecksumsAzureAccountKey=$(InternalChecksumsBlobFeedKey) - /p:InstallersTargetStaticFeed=$(InternalInstallersBlobFeedUrl) - /p:InstallersAzureAccountKey=$(InternalInstallersBlobFeedKey) - /p:PublishToAzureDevOpsNuGetFeeds=true - /p:AzureDevOpsStaticShippingFeed='${{ parameters.shippingFeed }}' - /p:AzureDevOpsStaticShippingFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)' - /p:AzureDevOpsStaticTransportFeed='${{ parameters.transportFeed }}' - /p:AzureDevOpsStaticTransportFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)' - /p:AzureDevOpsStaticSymbolsFeed='${{ parameters.symbolsFeed }}' - /p:AzureDevOpsStaticSymbolsFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)' - /p:PublishToMSDL=false - ${{ parameters.artifactsPublishingAdditionalParameters }} - - - template: ../../steps/promote-build.yml - parameters: - ChannelId: ${{ parameters.channelId }} diff --git a/eng/common/templates/post-build/channels/generic-public-channel.yml b/eng/common/templates/post-build/channels/generic-public-channel.yml deleted file mode 100644 index c4bc1897d..000000000 --- a/eng/common/templates/post-build/channels/generic-public-channel.yml +++ /dev/null @@ -1,148 +0,0 @@ -parameters: - artifactsPublishingAdditionalParameters: '' - publishInstallersAndChecksums: false - symbolPublishingAdditionalParameters: '' - stageName: '' - channelName: '' - channelId: '' - transportFeed: '' - shippingFeed: '' - symbolsFeed: '' - -stages: -- stage: ${{ parameters.stageName }} - dependsOn: validate - variables: - - template: ../common-variables.yml - displayName: ${{ parameters.channelName }} Publishing - jobs: - - template: ../setup-maestro-vars.yml - - - job: - displayName: Symbol Publishing - dependsOn: setupMaestroVars - condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.InitialChannels'], format('[{0}]', ${{ parameters.channelId }} )) - variables: - - group: DotNet-Symbol-Server-Pats - pool: - vmImage: 'windows-2019' - steps: 
- - task: DownloadBuildArtifacts@0 - displayName: Download Blob Artifacts - inputs: - artifactName: 'BlobArtifacts' - continueOnError: true - - - task: DownloadBuildArtifacts@0 - displayName: Download PDB Artifacts - inputs: - artifactName: 'PDBArtifacts' - continueOnError: true - - # This is necessary whenever we want to publish/restore to an AzDO private feed - # Since sdk-task.ps1 tries to restore packages we need to do this authentication here - # otherwise it'll complain about accessing a private feed. - - task: NuGetAuthenticate@0 - displayName: 'Authenticate to AzDO Feeds' - - - task: PowerShell@2 - displayName: Enable cross-org publishing - inputs: - filePath: eng\common\enable-cross-org-publishing.ps1 - arguments: -token $(dn-bot-dnceng-artifact-feeds-rw) - - - task: PowerShell@2 - displayName: Publish - inputs: - filePath: eng\common\sdk-task.ps1 - arguments: -task PublishToSymbolServers -restore -msbuildEngine dotnet - /p:DotNetSymbolServerTokenMsdl=$(microsoft-symbol-server-pat) - /p:DotNetSymbolServerTokenSymWeb=$(symweb-symbol-server-pat) - /p:PDBArtifactsDirectory='$(Build.ArtifactStagingDirectory)/PDBArtifacts/' - /p:BlobBasePath='$(Build.ArtifactStagingDirectory)/BlobArtifacts/' - /p:SymbolPublishingExclusionsFile='$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt' - /p:Configuration=Release - ${{ parameters.symbolPublishingAdditionalParameters }} - - - job: publish_assets - displayName: Publish Assets - dependsOn: setupMaestroVars - variables: - - name: BARBuildId - value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.BARBuildId'] ] - - name: IsStableBuild - value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.IsStableBuild'] ] - condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.InitialChannels'], format('[{0}]', ${{ parameters.channelId }})) - pool: - vmImage: 'windows-2019' - steps: - - task: DownloadBuildArtifacts@0 - displayName: Download Package Artifacts - inputs: - buildType: current - artifactName: PackageArtifacts - continueOnError: true - - - task: DownloadBuildArtifacts@0 - displayName: Download Blob Artifacts - inputs: - buildType: current - artifactName: BlobArtifacts - continueOnError: true - - - task: DownloadBuildArtifacts@0 - displayName: Download Asset Manifests - inputs: - buildType: current - artifactName: AssetManifests - - - task: NuGetToolInstaller@1 - displayName: 'Install NuGet.exe' - - # This is necessary whenever we want to publish/restore to an AzDO private feed - - task: NuGetAuthenticate@0 - displayName: 'Authenticate to AzDO Feeds' - - - task: PowerShell@2 - displayName: Enable cross-org publishing - inputs: - filePath: eng\common\enable-cross-org-publishing.ps1 - arguments: -token $(dn-bot-dnceng-artifact-feeds-rw) - - - task: PowerShell@2 - displayName: Publish Assets - inputs: - filePath: eng\common\sdk-task.ps1 - arguments: -task PublishArtifactsInManifest -restore -msbuildEngine dotnet - /p:ArtifactsCategory=$(_DotNetArtifactsCategory) - /p:IsStableBuild=$(IsStableBuild) - /p:IsInternalBuild=$(IsInternalBuild) - /p:RepositoryName=$(Build.Repository.Name) - /p:CommitSha=$(Build.SourceVersion) - /p:NugetPath=$(NuGetExeToolPath) - /p:AzdoTargetFeedPAT='$(dn-bot-dnceng-universal-packages-rw)' - /p:AzureStorageTargetFeedPAT='$(dotnetfeed-storage-access-key-1)' - /p:BARBuildId=$(BARBuildId) - /p:MaestroApiEndpoint='$(MaestroApiEndPoint)' - /p:BuildAssetRegistryToken='$(MaestroApiAccessToken)' - /p:ManifestsBasePath='$(Build.ArtifactStagingDirectory)/AssetManifests/' - 
/p:BlobBasePath='$(Build.ArtifactStagingDirectory)/BlobArtifacts/' - /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts/' - /p:Configuration=Release - /p:PublishInstallersAndChecksums=${{ parameters.publishInstallersAndChecksums }} - /p:InstallersTargetStaticFeed=$(InstallersBlobFeedUrl) - /p:InstallersAzureAccountKey=$(dotnetcli-storage-key) - /p:ChecksumsTargetStaticFeed=$(ChecksumsBlobFeedUrl) - /p:ChecksumsAzureAccountKey=$(dotnetclichecksums-storage-key) - /p:PublishToAzureDevOpsNuGetFeeds=true - /p:AzureDevOpsStaticShippingFeed='${{ parameters.shippingFeed }}' - /p:AzureDevOpsStaticShippingFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)' - /p:AzureDevOpsStaticTransportFeed='${{ parameters.transportFeed }}' - /p:AzureDevOpsStaticTransportFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)' - /p:AzureDevOpsStaticSymbolsFeed='${{ parameters.symbolsFeed }}' - /p:AzureDevOpsStaticSymbolsFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)' - ${{ parameters.artifactsPublishingAdditionalParameters }} - - - template: ../../steps/promote-build.yml - parameters: - ChannelId: ${{ parameters.channelId }} diff --git a/eng/common/templates/post-build/common-variables.yml b/eng/common/templates/post-build/common-variables.yml index 1883d2b17..173914f23 100644 --- a/eng/common/templates/post-build/common-variables.yml +++ b/eng/common/templates/post-build/common-variables.yml @@ -1,80 +1,22 @@ variables: - - group: AzureDevOps-Artifact-Feeds-Pats - - group: DotNet-Blob-Feed - - group: DotNet-DotNetCli-Storage - - group: DotNet-MSRC-Storage - group: Publish-Build-Assets - # .NET Core 3.1 Dev - - name: PublicDevRelease_31_Channel_Id - value: 128 - - # .NET Core 5 Dev - - name: NetCore_5_Dev_Channel_Id - value: 131 - - # .NET Eng - Validation - - name: Net_Eng_Validation_Channel_Id - value: 9 - - # .NET Eng - Latest - - name: Net_Eng_Latest_Channel_Id - value: 2 - - # .NET 3 Eng - Validation - - name: NET_3_Eng_Validation_Channel_Id - value: 390 - - # .NET 3 Eng - - name: NetCore_3_Tools_Channel_Id - value: 344 - - # .NET Core 3.1 Release - - name: PublicRelease_31_Channel_Id - value: 129 - - # General Testing - - name: GeneralTesting_Channel_Id - value: 529 - - # .NET Core 3.1 Blazor Features - - name: NetCore_31_Blazor_Features_Channel_Id - value: 531 - # Whether the build is internal or not - name: IsInternalBuild value: ${{ and(ne(variables['System.TeamProject'], 'public'), contains(variables['Build.SourceBranch'], 'internal')) }} # Default Maestro++ API Endpoint and API Version - name: MaestroApiEndPoint - value: "https://maestro-prod.westus2.cloudapp.azure.com" + value: "https://maestro.dot.net" - name: MaestroApiAccessToken value: $(MaestroAccessToken) - name: MaestroApiVersion - value: "2019-01-16" + value: "2020-02-20" - name: SourceLinkCLIVersion value: 3.0.0 - name: SymbolToolVersion value: 1.0.1 - # Feed Configurations - # These should include the suffix "/index.json" - - # Default locations for Installers and checksums - # Public Locations - - name: ChecksumsBlobFeedUrl - value: https://dotnetclichecksums.blob.core.windows.net/dotnet/index.json - - name: InstallersBlobFeedUrl - value: https://dotnetcli.blob.core.windows.net/dotnet/index.json - - # Private Locations - - name: InternalChecksumsBlobFeedUrl - value: https://dotnetclichecksumsmsrc.blob.core.windows.net/dotnet/index.json - - name: InternalChecksumsBlobFeedKey - value: $(dotnetclichecksumsmsrc-storage-key) - - - name: InternalInstallersBlobFeedUrl - value: https://dotnetclimsrc.blob.core.windows.net/dotnet/index.json - - name: 
InternalInstallersBlobFeedKey - value: $(dotnetclimsrc-access-key) + - name: runCodesignValidationInjection + value: false diff --git a/eng/common/templates/post-build/darc-gather-drop.yml b/eng/common/templates/post-build/darc-gather-drop.yml deleted file mode 100644 index 3268ccaa5..000000000 --- a/eng/common/templates/post-build/darc-gather-drop.yml +++ /dev/null @@ -1,23 +0,0 @@ -parameters: - ChannelId: 0 - -jobs: -- job: gatherDrop - displayName: Gather Drop - dependsOn: setupMaestroVars - condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.InitialChannels'], format('[{0}]', ${{ parameters.ChannelId }})) - variables: - - name: BARBuildId - value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.BARBuildId'] ] - pool: - vmImage: 'windows-2019' - steps: - - task: PowerShell@2 - displayName: Darc gather-drop - inputs: - filePath: $(Build.SourcesDirectory)/eng/common/post-build/darc-gather-drop.ps1 - arguments: -BarBuildId $(BARBuildId) - -DropLocation $(Agent.BuildDirectory)/Temp/Drop/ - -MaestroApiAccessToken $(MaestroApiAccessToken) - -MaestroApiEndPoint $(MaestroApiEndPoint) - -MaestroApiVersion $(MaestroApiVersion) diff --git a/eng/common/templates/post-build/post-build.yml b/eng/common/templates/post-build/post-build.yml index 8ad0f9f66..c3b6a3012 100644 --- a/eng/common/templates/post-build/post-build.yml +++ b/eng/common/templates/post-build/post-build.yml @@ -1,392 +1,283 @@ parameters: - enableSourceLinkValidation: false - enableSigningValidation: true - enableSymbolValidation: false - enableNugetValidation: true - publishInstallersAndChecksums: false - SDLValidationParameters: - enable: false - continueOnError: false - params: '' - artifactNames: '' + # Which publishing infra should be used. THIS SHOULD MATCH THE VERSION ON THE BUILD MANIFEST. + # Publishing V1 is no longer supported + # Publishing V2 is no longer supported + # Publishing V3 is the default + - name: publishingInfraVersion + displayName: Which version of publishing should be used to promote the build definition? 
+ type: number + default: 3 + values: + - 3 + + - name: BARBuildId + displayName: BAR Build Id + type: number + default: 0 + + - name: PromoteToChannelIds + displayName: Channel to promote BARBuildId to + type: string + default: '' + + - name: enableSourceLinkValidation + displayName: Enable SourceLink validation + type: boolean + default: false + + - name: enableSigningValidation + displayName: Enable signing validation + type: boolean + default: true + + - name: enableSymbolValidation + displayName: Enable symbol validation + type: boolean + default: false + + - name: enableNugetValidation + displayName: Enable NuGet validation + type: boolean + default: true + + - name: publishInstallersAndChecksums + displayName: Publish installers and checksums + type: boolean + default: true + + - name: SDLValidationParameters + type: object + default: + enable: false + publishGdn: false + continueOnError: false + params: '' + artifactNames: '' + downloadArtifacts: true # These parameters let the user customize the call to sdk-task.ps1 for publishing # symbols & general artifacts as well as for signing validation - symbolPublishingAdditionalParameters: '' - artifactsPublishingAdditionalParameters: '' - signingValidationAdditionalParameters: '' + - name: symbolPublishingAdditionalParameters + displayName: Symbol publishing additional parameters + type: string + default: '' + + - name: artifactsPublishingAdditionalParameters + displayName: Artifact publishing additional parameters + type: string + default: '' + + - name: signingValidationAdditionalParameters + displayName: Signing validation additional parameters + type: string + default: '' # Which stages should finish execution before post-build stages start - dependsOn: [build] + - name: validateDependsOn + type: object + default: + - build + + - name: publishDependsOn + type: object + default: + - Validate + + # Optional: Call asset publishing rather than running in a separate stage + - name: publishAssetsImmediately + type: boolean + default: false stages: -- stage: validate - dependsOn: ${{ parameters.dependsOn }} - displayName: Validate - jobs: - - ${{ if eq(parameters.enableNugetValidation, 'true') }}: +- ${{ if or(eq( parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}: + - stage: Validate + dependsOn: ${{ parameters.validateDependsOn }} + displayName: Validate Build Assets + variables: + - template: common-variables.yml + - template: /eng/common/templates/variables/pool-providers.yml + jobs: - job: displayName: NuGet Validation + condition: and(succeededOrFailed(), eq( ${{ parameters.enableNugetValidation }}, 'true')) pool: - vmImage: 'windows-2019' + # We don't use the collection uri here because it might vary (.visualstudio.com vs. 
dev.azure.com) + ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: + name: VSEngSS-MicroBuild2022-1ES + demands: Cmd + # If it's not devdiv, it's dnceng + ${{ else }}: + name: $(DncEngInternalBuildPool) + demands: ImageOverride -equals windows.vs2019.amd64 + steps: + - template: setup-maestro-vars.yml + parameters: + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} + - task: DownloadBuildArtifacts@0 displayName: Download Package Artifacts inputs: - buildType: current + buildType: specific + buildVersionToDownload: specific + project: $(AzDOProjectName) + pipeline: $(AzDOPipelineId) + buildId: $(AzDOBuildId) artifactName: PackageArtifacts + checkDownloadedFiles: true - task: PowerShell@2 displayName: Validate inputs: filePath: $(Build.SourcesDirectory)/eng/common/post-build/nuget-validation.ps1 - arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/ - -ToolDestinationPath $(Agent.BuildDirectory)/Extract/ + arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/ + -ToolDestinationPath $(Agent.BuildDirectory)/Extract/ - - ${{ if eq(parameters.enableSigningValidation, 'true') }}: - job: displayName: Signing Validation + condition: and( eq( ${{ parameters.enableSigningValidation }}, 'true'), ne( variables['PostBuildSign'], 'true')) pool: - vmImage: 'windows-2019' + # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com) + ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: + name: VSEngSS-MicroBuild2022-1ES + demands: Cmd + # If it's not devdiv, it's dnceng + ${{ else }}: + name: $(DncEngInternalBuildPool) + demands: ImageOverride -equals windows.vs2019.amd64 steps: - # This is necessary whenever we want to publish/restore to an AzDO private feed - # Since sdk-task.ps1 tries to restore packages we need to do this authentication here - # otherwise it'll complain about accessing a private feed. - - task: NuGetAuthenticate@0 - displayName: 'Authenticate to AzDO Feeds' + - template: setup-maestro-vars.yml + parameters: + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} - task: DownloadBuildArtifacts@0 displayName: Download Package Artifacts inputs: - buildType: current + buildType: specific + buildVersionToDownload: specific + project: $(AzDOProjectName) + pipeline: $(AzDOPipelineId) + buildId: $(AzDOBuildId) artifactName: PackageArtifacts + checkDownloadedFiles: true + itemPattern: | + ** + !**/Microsoft.SourceBuild.Intermediate.*.nupkg + # This is necessary whenever we want to publish/restore to an AzDO private feed + # Since sdk-task.ps1 tries to restore packages we need to do this authentication here + # otherwise it'll complain about accessing a private feed. + - task: NuGetAuthenticate@1 + displayName: 'Authenticate to AzDO Feeds' + + # Signing validation will optionally work with the buildmanifest file which is downloaded from + # Azure DevOps above. 
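+      # Illustrative sketch (not upstream template content): a caller opts in to this validation
+      # through the parameters declared at the top of this file, e.g.
+      #   - template: eng/common/templates/post-build/post-build.yml
+      #     parameters:
+      #       enableSigningValidation: true  # default declared above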
- task: PowerShell@2 displayName: Validate inputs: filePath: eng\common\sdk-task.ps1 - arguments: -task SigningValidation -restore -msbuildEngine dotnet + arguments: -task SigningValidation -restore -msbuildEngine vs /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts' /p:SignCheckExclusionsFile='$(Build.SourcesDirectory)/eng/SignCheckExclusionsFile.txt' - /p:Configuration=Release ${{ parameters.signingValidationAdditionalParameters }} - - ${{ if eq(parameters.enableSourceLinkValidation, 'true') }}: + - template: ../steps/publish-logs.yml + parameters: + StageLabel: 'Validation' + JobLabel: 'Signing' + - job: displayName: SourceLink Validation - variables: - - template: common-variables.yml + condition: eq( ${{ parameters.enableSourceLinkValidation }}, 'true') pool: - vmImage: 'windows-2019' + # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com) + ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: + name: VSEngSS-MicroBuild2022-1ES + demands: Cmd + # If it's not devdiv, it's dnceng + ${{ else }}: + name: $(DncEngInternalBuildPool) + demands: ImageOverride -equals windows.vs2019.amd64 steps: + - template: setup-maestro-vars.yml + parameters: + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} + - task: DownloadBuildArtifacts@0 displayName: Download Blob Artifacts inputs: - buildType: current + buildType: specific + buildVersionToDownload: specific + project: $(AzDOProjectName) + pipeline: $(AzDOPipelineId) + buildId: $(AzDOBuildId) artifactName: BlobArtifacts + checkDownloadedFiles: true - task: PowerShell@2 displayName: Validate inputs: filePath: $(Build.SourcesDirectory)/eng/common/post-build/sourcelink-validation.ps1 - arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/ - -ExtractPath $(Agent.BuildDirectory)/Extract/ - -GHRepoName $(Build.Repository.Name) + arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/ + -ExtractPath $(Agent.BuildDirectory)/Extract/ + -GHRepoName $(Build.Repository.Name) -GHCommit $(Build.SourceVersion) -SourcelinkCliVersion $(SourceLinkCLIVersion) continueOnError: true - - ${{ if eq(parameters.SDLValidationParameters.enable, 'true') }}: - template: /eng/common/templates/job/execute-sdl.yml parameters: + enable: ${{ parameters.SDLValidationParameters.enable }} + publishGuardianDirectoryToPipeline: ${{ parameters.SDLValidationParameters.publishGdn }} additionalParameters: ${{ parameters.SDLValidationParameters.params }} continueOnError: ${{ parameters.SDLValidationParameters.continueOnError }} artifactNames: ${{ parameters.SDLValidationParameters.artifactNames }} + downloadArtifacts: ${{ parameters.SDLValidationParameters.downloadArtifacts }} + +- ${{ if ne(parameters.publishAssetsImmediately, 'true') }}: + - stage: publish_using_darc + ${{ if or(eq(parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}: + dependsOn: ${{ parameters.publishDependsOn }} + ${{ else }}: + dependsOn: ${{ parameters.validateDependsOn }} + displayName: Publish using Darc + variables: + - template: common-variables.yml + - template: /eng/common/templates/variables/pool-providers.yml + jobs: + - job: + displayName: Publish Using Darc + timeoutInMinutes: 120 + pool: + # We don't use the collection uri here because it might vary (.visualstudio.com vs. 
dev.azure.com) + ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: + name: VSEngSS-MicroBuild2022-1ES + demands: Cmd + # If it's not devdiv, it's dnceng + ${{ else }}: + name: NetCore1ESPool-Publishing-Internal + demands: ImageOverride -equals windows.vs2019.amd64 + steps: + - template: setup-maestro-vars.yml + parameters: + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} -- template: \eng\common\templates\post-build\channels\generic-public-channel.yml - parameters: - artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} - publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }} - symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }} - stageName: 'NetCore_Dev31_Publish' - channelName: '.NET Core 3.1 Dev' - channelId: 128 - transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-transport/nuget/v3/index.json' - shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1/nuget/v3/index.json' - symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-symbols/nuget/v3/index.json' - -- template: \eng\common\templates\post-build\channels\generic-public-channel.yml - parameters: - artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} - publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }} - symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }} - stageName: 'Net_Eng_Latest_Publish' - channelName: '.NET Eng - Latest' - channelId: 2 - transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json' - shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json' - symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng-symbols/nuget/v3/index.json' - -- template: \eng\common\templates\post-build\channels\generic-public-channel.yml - parameters: - artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} - publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }} - symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }} - stageName: 'Net_Eng_Validation_Publish' - channelName: '.NET Eng - Validation' - channelId: 9 - transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json' - shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json' - symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng-symbols/nuget/v3/index.json' - -- template: \eng\common\templates\post-build\channels\generic-public-channel.yml - parameters: - artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} - publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }} - symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }} - stageName: 'NetCore_3_Tools_Validation_Publish' - channelName: '.NET 3 Tools - Validation' - channelId: 390 - transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json' - shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json' - symbolsFeed: 
'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng-symbols/nuget/v3/index.json' - -- template: \eng\common\templates\post-build\channels\generic-public-channel.yml - parameters: - artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} - publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }} - symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }} - stageName: 'NetCore_3_Tools_Publish' - channelName: '.NET 3 Tools' - channelId: 344 - transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json' - shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json' - symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng-symbols/nuget/v3/index.json' - -- template: \eng\common\templates\post-build\channels\generic-public-channel.yml - parameters: - artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} - publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }} - symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }} - stageName: 'NetCore_Release31_Publish' - channelName: '.NET Core 3.1 Release' - channelId: 129 - transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-transport/nuget/v3/index.json' - shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1/nuget/v3/index.json' - symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-symbols/nuget/v3/index.json' - -- template: \eng\common\templates\post-build\channels\generic-public-channel.yml - parameters: - artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} - publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }} - symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }} - stageName: 'NetCore_Blazor31_Features_Publish' - channelName: '.NET Core 3.1 Blazor Features' - channelId: 531 - transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-blazor/nuget/v3/index.json' - shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-blazor/nuget/v3/index.json' - symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-blazor-symbols/nuget/v3/index.json' - -- template: \eng\common\templates\post-build\channels\generic-internal-channel.yml - parameters: - artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} - publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }} - symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }} - stageName: 'NetCore_31_Internal_Servicing_Publishing' - channelName: '.NET Core 3.1 Internal Servicing' - channelId: 550 - transportFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v3/index.json' - shippingFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal/nuget/v3/index.json' - symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-symbols/nuget/v3/index.json' - -- template: \eng\common\templates\post-build\channels\generic-public-channel.yml - parameters: - artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} - publishInstallersAndChecksums: ${{ 
parameters.publishInstallersAndChecksums }} - symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }} - stageName: 'General_Testing_Publish' - channelName: 'General Testing' - channelId: 529 - transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/general-testing/nuget/v3/index.json' - shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/general-testing/nuget/v3/index.json' - symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/general-testing-symbols/nuget/v3/index.json' - -- template: \eng\common\templates\post-build\channels\generic-public-channel.yml - parameters: - artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} - publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }} - symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }} - stageName: 'NETCore_Tooling_Dev_Publishing' - channelName: '.NET Core Tooling Dev' - channelId: 548 - transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json' - shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json' - symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-symbols/nuget/v3/index.json' - -- template: \eng\common\templates\post-build\channels\generic-public-channel.yml - parameters: - artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} - publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }} - symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }} - stageName: 'NETCore_Tooling_Release_Publishing' - channelName: '.NET Core Tooling Release' - channelId: 549 - transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json' - shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json' - symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-symbols/nuget/v3/index.json' - -- template: \eng\common\templates\post-build\channels\generic-public-channel.yml - parameters: - artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} - publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }} - symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }} - stageName: 'NETCore_SDK_311xx_Publishing' - channelName: '.NET Core SDK 3.1.1xx' - channelId: 560 - transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-transport/nuget/v3/index.json' - shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1/nuget/v3/index.json' - symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-symbols/nuget/v3/index.json' - -- template: \eng\common\templates\post-build\channels\generic-internal-channel.yml - parameters: - artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} - publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }} - symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }} - stageName: 'NETCore_SDK_311xx_Internal_Publishing' - channelName: '.NET Core SDK 3.1.1xx Internal' - channelId: 559 - transportFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v3/index.json' - 
shippingFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal/nuget/v3/index.json' - symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-symbols/nuget/v3/index.json' - -- template: \eng\common\templates\post-build\channels\generic-public-channel.yml - parameters: - artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} - publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }} - symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }} - stageName: 'NETCore_SDK_312xx_Publishing' - channelName: '.NET Core SDK 3.1.2xx' - channelId: 558 - transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-transport/nuget/v3/index.json' - shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1/nuget/v3/index.json' - symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-symbols/nuget/v3/index.json' - -- template: \eng\common\templates\post-build\channels\generic-internal-channel.yml - parameters: - artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} - publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }} - symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }} - stageName: 'NETCore_SDK_312xx_Internal_Publishing' - channelName: '.NET Core SDK 3.1.2xx Internal' - channelId: 557 - transportFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v3/index.json' - shippingFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal/nuget/v3/index.json' - symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-symbols/nuget/v3/index.json' - -- template: \eng\common\templates\post-build\channels\generic-public-channel.yml - parameters: - artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} - publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }} - symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }} - stageName: 'NETCore_SDK_313xx_Publishing' - channelName: '.NET Core SDK 3.1.3xx' - channelId: 759 - transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-transport/nuget/v3/index.json' - shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1/nuget/v3/index.json' - symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-symbols/nuget/v3/index.json' - -- template: \eng\common\templates\post-build\channels\generic-internal-channel.yml - parameters: - artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} - publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }} - symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }} - stageName: 'NETCore_SDK_313xx_Internal_Publishing' - channelName: '.NET Core SDK 3.1.3xx Internal' - channelId: 760 - transportFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v3/index.json' - shippingFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal/nuget/v3/index.json' - symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-symbols/nuget/v3/index.json' - -- template: \eng\common\templates\post-build\channels\generic-public-channel.yml - parameters: - 
artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} - publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }} - symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }} - stageName: 'NETCore_SDK_314xx_Publishing' - channelName: '.NET Core SDK 3.1.4xx' - channelId: 921 - transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-transport/nuget/v3/index.json' - shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1/nuget/v3/index.json' - symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-symbols/nuget/v3/index.json' - -- template: \eng\common\templates\post-build\channels\generic-internal-channel.yml - parameters: - artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} - publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }} - symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }} - stageName: 'NETCore_SDK_314xx_Internal_Publishing' - channelName: '.NET Core SDK 3.1.4xx Internal' - channelId: 922 - transportFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v3/index.json' - shippingFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal/nuget/v3/index.json' - symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-symbols/nuget/v3/index.json' - -- template: \eng\common\templates\post-build\channels\generic-public-channel.yml - parameters: - artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} - publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }} - symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }} - stageName: 'VS16_6_Publishing' - channelName: 'VS 16.6' - channelId: 1010 - transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-transport/nuget/v3/index.json' - shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json' - symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-symbols/nuget/v3/index.json' - -- template: \eng\common\templates\post-build\channels\generic-public-channel.yml - parameters: - artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} - publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }} - symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }} - stageName: 'VS16_7_Publishing' - channelName: 'VS 16.7' - channelId: 1011 - transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-transport/nuget/v3/index.json' - shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json' - symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-symbols/nuget/v3/index.json' - -- template: \eng\common\templates\post-build\channels\generic-public-channel.yml - parameters: - artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} - publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }} - symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }} - stageName: 'VS16_8_Publishing' - channelName: 'VS 16.8' - channelId: 1154 - transportFeed: 
'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-transport/nuget/v3/index.json' - shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json' - symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-symbols/nuget/v3/index.json' - -- template: \eng\common\templates\post-build\channels\generic-public-channel.yml - parameters: - artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} - publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }} - symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }} - stageName: 'VS16_9_Publishing' - channelName: 'VS 16.9' - channelId: 1473 - transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-transport/nuget/v3/index.json' - shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json' - symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-symbols/nuget/v3/index.json' + - task: NuGetAuthenticate@1 -- template: \eng\common\templates\post-build\channels\generic-public-channel.yml - parameters: - artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} - publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }} - symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }} - stageName: 'VS_Master_Publishing' - channelName: 'VS Master' - channelId: 1012 - transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-transport/nuget/v3/index.json' - shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json' - symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-symbols/nuget/v3/index.json' \ No newline at end of file + - task: AzureCLI@2 + displayName: Publish Using Darc + inputs: + azureSubscription: "Darc: Maestro Production" + scriptType: ps + scriptLocation: scriptPath + scriptPath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1 + arguments: -BuildId $(BARBuildId) + -PublishingInfraVersion ${{ parameters.publishingInfraVersion }} + -AzdoToken '$(System.AccessToken)' + -WaitPublishingFinish true + -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}' + -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}' diff --git a/eng/common/templates/post-build/promote-build.yml b/eng/common/templates/post-build/promote-build.yml deleted file mode 100644 index 6b479c3b8..000000000 --- a/eng/common/templates/post-build/promote-build.yml +++ /dev/null @@ -1,25 +0,0 @@ -parameters: - ChannelId: 0 - -jobs: -- job: - displayName: Promote Build - dependsOn: setupMaestroVars - condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.InitialChannels'], format('[{0}]', ${{ parameters.ChannelId }})) - variables: - - name: BARBuildId - value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.BARBuildId'] ] - - name: ChannelId - value: ${{ parameters.ChannelId }} - pool: - vmImage: 'windows-2019' - steps: - - task: PowerShell@2 - displayName: Add Build to Channel - inputs: - filePath: $(Build.SourcesDirectory)/eng/common/post-build/promote-build.ps1 - arguments: -BuildId $(BARBuildId) - -ChannelId $(ChannelId) - -MaestroApiAccessToken $(MaestroApiAccessToken) - -MaestroApiEndPoint 
$(MaestroApiEndPoint) - -MaestroApiVersion $(MaestroApiVersion) diff --git a/eng/common/templates/post-build/setup-maestro-vars.yml b/eng/common/templates/post-build/setup-maestro-vars.yml index 56242b068..64b9abc68 100644 --- a/eng/common/templates/post-build/setup-maestro-vars.yml +++ b/eng/common/templates/post-build/setup-maestro-vars.yml @@ -1,18 +1,70 @@ -jobs: -- job: setupMaestroVars - displayName: Setup Maestro Vars - pool: - vmImage: 'windows-2019' - steps: +parameters: + BARBuildId: '' + PromoteToChannelIds: '' + +steps: + - ${{ if eq(coalesce(parameters.PromoteToChannelIds, 0), 0) }}: - task: DownloadBuildArtifacts@0 displayName: Download Release Configs inputs: buildType: current artifactName: ReleaseConfigs + checkDownloadedFiles: true - - task: PowerShell@2 - name: setReleaseVars - displayName: Set Release Configs Vars - inputs: - filePath: $(Build.SourcesDirectory)/eng/common/post-build/setup-maestro-vars.ps1 - arguments: -ReleaseConfigsPath '$(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt' + - task: AzureCLI@2 + name: setReleaseVars + displayName: Set Release Configs Vars + inputs: + azureSubscription: "Darc: Maestro Production" + scriptType: pscore + scriptLocation: inlineScript + inlineScript: | + try { + if (!$Env:PromoteToMaestroChannels -or $Env:PromoteToMaestroChannels.Trim() -eq '') { + $Content = Get-Content $(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt + + $BarId = $Content | Select -Index 0 + $Channels = $Content | Select -Index 1 + $IsStableBuild = $Content | Select -Index 2 + + $AzureDevOpsProject = $Env:System_TeamProject + $AzureDevOpsBuildDefinitionId = $Env:System_DefinitionId + $AzureDevOpsBuildId = $Env:Build_BuildId + } + else { + . $(Build.SourcesDirectory)\eng\common\tools.ps1 + $darc = Get-Darc + $buildInfo = & $darc get-build ` + --id ${{ parameters.BARBuildId }} ` + --extended ` + --output-format json ` + --ci ` + | convertFrom-Json + + $BarId = ${{ parameters.BARBuildId }} + $Channels = $Env:PromoteToMaestroChannels -split "," + $Channels = $Channels -join "][" + $Channels = "[$Channels]" + + $IsStableBuild = $buildInfo.stable + $AzureDevOpsProject = $buildInfo.azureDevOpsProject + $AzureDevOpsBuildDefinitionId = $buildInfo.azureDevOpsBuildDefinitionId + $AzureDevOpsBuildId = $buildInfo.azureDevOpsBuildId + } + + Write-Host "##vso[task.setvariable variable=BARBuildId]$BarId" + Write-Host "##vso[task.setvariable variable=TargetChannels]$Channels" + Write-Host "##vso[task.setvariable variable=IsStableBuild]$IsStableBuild" + + Write-Host "##vso[task.setvariable variable=AzDOProjectName]$AzureDevOpsProject" + Write-Host "##vso[task.setvariable variable=AzDOPipelineId]$AzureDevOpsBuildDefinitionId" + Write-Host "##vso[task.setvariable variable=AzDOBuildId]$AzureDevOpsBuildId" + } + catch { + Write-Host $_ + Write-Host $_.Exception + Write-Host $_.ScriptStackTrace + exit 1 + } + env: + PromoteToMaestroChannels: ${{ parameters.PromoteToChannelIds }} diff --git a/eng/common/templates/steps/add-build-to-channel.yml b/eng/common/templates/steps/add-build-to-channel.yml new file mode 100644 index 000000000..f67a210d6 --- /dev/null +++ b/eng/common/templates/steps/add-build-to-channel.yml @@ -0,0 +1,13 @@ +parameters: + ChannelId: 0 + +steps: +- task: PowerShell@2 + displayName: Add Build to Channel + inputs: + filePath: $(Build.SourcesDirectory)/eng/common/post-build/add-build-to-channel.ps1 + arguments: -BuildId $(BARBuildId) + -ChannelId ${{ parameters.ChannelId }} + -MaestroApiAccessToken $(MaestroApiAccessToken) + 
-MaestroApiEndPoint $(MaestroApiEndPoint) + -MaestroApiVersion $(MaestroApiVersion) diff --git a/eng/common/templates/steps/component-governance.yml b/eng/common/templates/steps/component-governance.yml new file mode 100644 index 000000000..cbba05967 --- /dev/null +++ b/eng/common/templates/steps/component-governance.yml @@ -0,0 +1,13 @@ +parameters: + disableComponentGovernance: false + componentGovernanceIgnoreDirectories: '' + +steps: +- ${{ if eq(parameters.disableComponentGovernance, 'true') }}: + - script: echo "##vso[task.setvariable variable=skipComponentGovernanceDetection]true" + displayName: Set skipComponentGovernanceDetection variable +- ${{ if ne(parameters.disableComponentGovernance, 'true') }}: + - task: ComponentGovernanceComponentDetection@0 + continueOnError: true + inputs: + ignoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }} \ No newline at end of file diff --git a/eng/common/templates/steps/enable-internal-runtimes.yml b/eng/common/templates/steps/enable-internal-runtimes.yml new file mode 100644 index 000000000..54dc9416c --- /dev/null +++ b/eng/common/templates/steps/enable-internal-runtimes.yml @@ -0,0 +1,28 @@ +# Obtains internal runtime download credentials and populates the 'dotnetbuilds-internal-container-read-token-base64' +# variable with the base64-encoded SAS token, by default + +parameters: +- name: federatedServiceConnection + type: string + default: 'dotnetbuilds-internal-read' +- name: outputVariableName + type: string + default: 'dotnetbuilds-internal-container-read-token-base64' +- name: expiryInHours + type: number + default: 1 +- name: base64Encode + type: boolean + default: true + +steps: +- ${{ if ne(variables['System.TeamProject'], 'public') }}: + - template: /eng/common/templates/steps/get-delegation-sas.yml + parameters: + federatedServiceConnection: ${{ parameters.federatedServiceConnection }} + outputVariableName: ${{ parameters.outputVariableName }} + expiryInHours: ${{ parameters.expiryInHours }} + base64Encode: ${{ parameters.base64Encode }} + storageAccount: dotnetbuilds + container: internal + permissions: rl diff --git a/eng/common/templates/steps/execute-codeql.yml b/eng/common/templates/steps/execute-codeql.yml new file mode 100644 index 000000000..3930b1630 --- /dev/null +++ b/eng/common/templates/steps/execute-codeql.yml @@ -0,0 +1,32 @@ +parameters: + # Language that should be analyzed. Defaults to csharp + language: csharp + # Build Commands + buildCommands: '' + overrideParameters: '' # Optional: to override values for parameters. + additionalParameters: '' # Optional: parameters that need user specific values eg: '-SourceToolsList @("abc","def") -ArtifactToolsList @("ghi","jkl")' + # Optional: if specified, restore and use this version of Guardian instead of the default. + overrideGuardianVersion: '' + # Optional: if true, publish the '.gdn' folder as a pipeline artifact. This can help with in-depth + # diagnosis of problems with specific tool configurations. + publishGuardianDirectoryToPipeline: false + # The script to run to execute all SDL tools. Use this if you want to use a script to define SDL + # parameters rather than relying on YAML. It may be better to use a local script, because you can + # reproduce results locally without piecing together a command based on the YAML. 
+ executeAllSdlToolsScript: 'eng/common/sdl/execute-all-sdl-tools.ps1' + # There is some sort of bug (has been reported) in Azure DevOps where if this parameter is named + # 'continueOnError', the parameter value is not correctly picked up. + # This can also be remedied by the caller (post-build.yml) if it does not use a nested parameter + # optional: determines whether to continue the build if the step errors; + sdlContinueOnError: false + +steps: +- template: /eng/common/templates/steps/execute-sdl.yml + parameters: + overrideGuardianVersion: ${{ parameters.overrideGuardianVersion }} + executeAllSdlToolsScript: ${{ parameters.executeAllSdlToolsScript }} + overrideParameters: ${{ parameters.overrideParameters }} + additionalParameters: '${{ parameters.additionalParameters }} + -CodeQLAdditionalRunConfigParams @("BuildCommands < ${{ parameters.buildCommands }}", "Language < ${{ parameters.language }}")' + publishGuardianDirectoryToPipeline: ${{ parameters.publishGuardianDirectoryToPipeline }} + sdlContinueOnError: ${{ parameters.sdlContinueOnError }} \ No newline at end of file diff --git a/eng/common/templates/steps/execute-sdl.yml b/eng/common/templates/steps/execute-sdl.yml new file mode 100644 index 000000000..fe0ebf8c9 --- /dev/null +++ b/eng/common/templates/steps/execute-sdl.yml @@ -0,0 +1,89 @@ +parameters: + overrideGuardianVersion: '' + executeAllSdlToolsScript: '' + overrideParameters: '' + additionalParameters: '' + publishGuardianDirectoryToPipeline: false + sdlContinueOnError: false + condition: '' + +steps: +- task: NuGetAuthenticate@1 + +- task: NuGetToolInstaller@1 + displayName: 'Install NuGet.exe' + +- ${{ if ne(parameters.overrideGuardianVersion, '') }}: + - pwsh: | + Set-Location -Path $(Build.SourcesDirectory)\eng\common\sdl + . .\sdl.ps1 + $guardianCliLocation = Install-Gdn -Path $(Build.SourcesDirectory)\.artifacts -Version ${{ parameters.overrideGuardianVersion }} + Write-Host "##vso[task.setvariable variable=GuardianCliLocation]$guardianCliLocation" + displayName: Install Guardian (Overridden) + +- ${{ if eq(parameters.overrideGuardianVersion, '') }}: + - pwsh: | + Set-Location -Path $(Build.SourcesDirectory)\eng\common\sdl + . .\sdl.ps1 + $guardianCliLocation = Install-Gdn -Path $(Build.SourcesDirectory)\.artifacts + Write-Host "##vso[task.setvariable variable=GuardianCliLocation]$guardianCliLocation" + displayName: Install Guardian + +- ${{ if ne(parameters.overrideParameters, '') }}: + - powershell: ${{ parameters.executeAllSdlToolsScript }} ${{ parameters.overrideParameters }} + displayName: Execute SDL (Overridden) + continueOnError: ${{ parameters.sdlContinueOnError }} + condition: ${{ parameters.condition }} + env: + GUARDIAN_DEFAULT_PACKAGE_SOURCE_SECRET: $(System.AccessToken) + +- ${{ if eq(parameters.overrideParameters, '') }}: + - powershell: ${{ parameters.executeAllSdlToolsScript }} + -GuardianCliLocation $(GuardianCliLocation) + -NugetPackageDirectory $(Build.SourcesDirectory)\.packages + ${{ parameters.additionalParameters }} + displayName: Execute SDL + continueOnError: ${{ parameters.sdlContinueOnError }} + condition: ${{ parameters.condition }} + env: + GUARDIAN_DEFAULT_PACKAGE_SOURCE_SECRET: $(System.AccessToken) + +- ${{ if ne(parameters.publishGuardianDirectoryToPipeline, 'false') }}: + # We want to publish the Guardian results and configuration for easy diagnosis. However, the + # '.gdn' dir is a mix of configuration, results, extracted dependencies, and Guardian default + # tooling files. 
Some of these files are large and aren't useful during an investigation, so + # exclude them by simply deleting them before publishing. (As of writing, there is no documented + # way to selectively exclude a dir from the pipeline artifact publish task.) + - task: DeleteFiles@1 + displayName: Delete Guardian dependencies to avoid uploading + inputs: + SourceFolder: $(Agent.BuildDirectory)/.gdn + Contents: | + c + i + condition: succeededOrFailed() + + - publish: $(Agent.BuildDirectory)/.gdn + artifact: GuardianConfiguration + displayName: Publish GuardianConfiguration + condition: succeededOrFailed() + + # Publish the SARIF files in a container named CodeAnalysisLogs to enable integration + # with the "SARIF SAST Scans Tab" Azure DevOps extension + - task: CopyFiles@2 + displayName: Copy SARIF files + inputs: + flattenFolders: true + sourceFolder: $(Agent.BuildDirectory)/.gdn/rc/ + contents: '**/*.sarif' + targetFolder: $(Build.SourcesDirectory)/CodeAnalysisLogs + condition: succeededOrFailed() + + # Use PublishBuildArtifacts because the SARIF extension only checks this case + # see microsoft/sarif-azuredevops-extension#4 + - task: PublishBuildArtifacts@1 + displayName: Publish SARIF files to CodeAnalysisLogs container + inputs: + pathToPublish: $(Build.SourcesDirectory)/CodeAnalysisLogs + artifactName: CodeAnalysisLogs + condition: succeededOrFailed() \ No newline at end of file diff --git a/eng/common/templates/steps/generate-sbom.yml b/eng/common/templates/steps/generate-sbom.yml new file mode 100644 index 000000000..2b21eae42 --- /dev/null +++ b/eng/common/templates/steps/generate-sbom.yml @@ -0,0 +1,48 @@ +# BuildDropPath - The root folder of the drop directory for which the manifest file will be generated. +# PackageName - The name of the package this SBOM represents. +# PackageVersion - The version of the package this SBOM represents. +# ManifestDirPath - The path of the directory where the generated manifest files will be placed +# IgnoreDirectories - Directories to ignore for SBOM generation. This will be passed through to the CG component detector. 
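+#
+# Example usage -- an illustrative sketch only (not taken from this repo's pipelines); the
+# parameter values shown are simply this template's own defaults, declared below:
+#   steps:
+#   - template: /eng/common/templates/steps/generate-sbom.yml
+#     parameters:
+#       PackageName: '.NET'
+#       PackageVersion: 8.0.0
+#       BuildDropPath: '$(Build.SourcesDirectory)/artifacts'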
+
+parameters:
+  PackageVersion: 8.0.0
+  BuildDropPath: '$(Build.SourcesDirectory)/artifacts'
+  PackageName: '.NET'
+  ManifestDirPath: $(Build.ArtifactStagingDirectory)/sbom
+  IgnoreDirectories: ''
+  sbomContinueOnError: true
+
+steps:
+- task: PowerShell@2
+  displayName: Prep for SBOM generation (non-Linux)
+  condition: or(eq(variables['Agent.Os'], 'Windows_NT'), eq(variables['Agent.Os'], 'Darwin'))
+  inputs:
+    filePath: ./eng/common/generate-sbom-prep.ps1
+    arguments: ${{parameters.manifestDirPath}}
+
+# Chmodding is a workaround for https://github.com/dotnet/arcade/issues/8461
+- script: |
+    chmod +x ./eng/common/generate-sbom-prep.sh
+    ./eng/common/generate-sbom-prep.sh ${{parameters.manifestDirPath}}
+  displayName: Prep for SBOM generation (Linux)
+  condition: eq(variables['Agent.Os'], 'Linux')
+  continueOnError: ${{ parameters.sbomContinueOnError }}
+
+- task: AzureArtifacts.manifest-generator-task.manifest-generator-task.ManifestGeneratorTask@0
+  displayName: 'Generate SBOM manifest'
+  continueOnError: ${{ parameters.sbomContinueOnError }}
+  inputs:
+    PackageName: ${{ parameters.packageName }}
+    BuildDropPath: ${{ parameters.buildDropPath }}
+    PackageVersion: ${{ parameters.packageVersion }}
+    ManifestDirPath: ${{ parameters.manifestDirPath }}
+    ${{ if ne(parameters.IgnoreDirectories, '') }}:
+      AdditionalComponentDetectorArgs: '--IgnoreDirectories ${{ parameters.IgnoreDirectories }}'
+
+- task: PublishPipelineArtifact@1
+  displayName: Publish SBOM manifest
+  continueOnError: ${{parameters.sbomContinueOnError}}
+  inputs:
+    targetPath: '${{parameters.manifestDirPath}}'
+    artifactName: $(ARTIFACT_NAME)
+
diff --git a/eng/common/templates/steps/get-delegation-sas.yml b/eng/common/templates/steps/get-delegation-sas.yml
new file mode 100644
index 000000000..c690cc0a0
--- /dev/null
+++ b/eng/common/templates/steps/get-delegation-sas.yml
@@ -0,0 +1,52 @@
+parameters:
+- name: federatedServiceConnection
+  type: string
+- name: outputVariableName
+  type: string
+- name: expiryInHours
+  type: number
+  default: 1
+- name: base64Encode
+  type: boolean
+  default: false
+- name: storageAccount
+  type: string
+- name: container
+  type: string
+- name: permissions
+  type: string
+  default: 'rl'
+
+steps:
+- task: AzureCLI@2
+  displayName: 'Generate delegation SAS Token for ${{ parameters.storageAccount }}/${{ parameters.container }}'
+  inputs:
+    azureSubscription: ${{ parameters.federatedServiceConnection }}
+    scriptType: 'pscore'
+    scriptLocation: 'inlineScript'
+    inlineScript: |
+      # Calculate the expiration of the SAS token and convert to UTC
+      $expiry = (Get-Date).AddHours(${{ parameters.expiryInHours }}).ToUniversalTime().ToString("yyyy-MM-ddTHH:mm:ssZ")
+
+      # Temporarily work around a helix issue where SAS tokens with / in them will cause incorrect downloads
+      # of correlation payloads. https://github.com/dotnet/dnceng/issues/3484
+      $sas = ""
+      do {
+        $sas = az storage container generate-sas --account-name ${{ parameters.storageAccount }} --name ${{ parameters.container }} --permissions ${{ parameters.permissions }} --expiry $expiry --auth-mode login --as-user -o tsv
+        if ($LASTEXITCODE -ne 0) {
+          Write-Error "Failed to generate SAS token."
+          exit 1
+        }
+      } while($sas.IndexOf('/') -ne -1)
+
+      if ($LASTEXITCODE -ne 0) {
+        Write-Error "Failed to generate SAS token."
+ exit 1 + } + + if ('${{ parameters.base64Encode }}' -eq 'true') { + $sas = [Convert]::ToBase64String([System.Text.Encoding]::UTF8.GetBytes($sas)) + } + + Write-Host "Setting '${{ parameters.outputVariableName }}' with the access token value" + Write-Host "##vso[task.setvariable variable=${{ parameters.outputVariableName }};issecret=true]$sas" diff --git a/eng/common/templates/steps/get-federated-access-token.yml b/eng/common/templates/steps/get-federated-access-token.yml new file mode 100644 index 000000000..55e33bd38 --- /dev/null +++ b/eng/common/templates/steps/get-federated-access-token.yml @@ -0,0 +1,40 @@ +parameters: +- name: federatedServiceConnection + type: string +- name: outputVariableName + type: string +- name: stepName + type: string + default: 'getFederatedAccessToken' +- name: condition + type: string + default: '' +# Resource to get a token for. Common values include: +# - '499b84ac-1321-427f-aa17-267ca6975798' for Azure DevOps +# - 'https://storage.azure.com/' for storage +# Defaults to Azure DevOps +- name: resource + type: string + default: '499b84ac-1321-427f-aa17-267ca6975798' +- name: isStepOutputVariable + type: boolean + default: false + +steps: +- task: AzureCLI@2 + displayName: 'Getting federated access token for feeds' + name: ${{ parameters.stepName }} + ${{ if ne(parameters.condition, '') }}: + condition: ${{ parameters.condition }} + inputs: + azureSubscription: ${{ parameters.federatedServiceConnection }} + scriptType: 'pscore' + scriptLocation: 'inlineScript' + inlineScript: | + $accessToken = az account get-access-token --query accessToken --resource ${{ parameters.resource }} --output tsv + if ($LASTEXITCODE -ne 0) { + Write-Error "Failed to get access token for resource '${{ parameters.resource }}'" + exit 1 + } + Write-Host "Setting '${{ parameters.outputVariableName }}' with the access token value" + Write-Host "##vso[task.setvariable variable=${{ parameters.outputVariableName }};issecret=true;isOutput=${{ parameters.isStepOutputVariable }}]$accessToken" \ No newline at end of file diff --git a/eng/common/templates/steps/publish-logs.yml b/eng/common/templates/steps/publish-logs.yml new file mode 100644 index 000000000..88f238f36 --- /dev/null +++ b/eng/common/templates/steps/publish-logs.yml @@ -0,0 +1,23 @@ +parameters: + StageLabel: '' + JobLabel: '' + +steps: +- task: Powershell@2 + displayName: Prepare Binlogs to Upload + inputs: + targetType: inline + script: | + New-Item -ItemType Directory $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/ + Move-Item -Path $(Build.SourcesDirectory)/artifacts/log/Debug/* $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/ + continueOnError: true + condition: always() + +- task: PublishBuildArtifacts@1 + displayName: Publish Logs + inputs: + PathtoPublish: '$(Build.SourcesDirectory)/PostBuildLogs' + PublishLocation: Container + ArtifactName: PostBuildLogs + continueOnError: true + condition: always() diff --git a/eng/common/templates/steps/retain-build.yml b/eng/common/templates/steps/retain-build.yml new file mode 100644 index 000000000..83d97a26a --- /dev/null +++ b/eng/common/templates/steps/retain-build.yml @@ -0,0 +1,28 @@ +parameters: + # Optional azure devops PAT with build execute permissions for the build's organization, + # only needed if the build that should be retained ran on a different organization than + # the pipeline where this template is executing from + Token: '' + # Optional BuildId to retain, defaults to 
the current running build
+  BuildId: ''
+  # Azure DevOps organization URI for the build, in the https://dev.azure.com/<organization> format.
+  # Defaults to the organization the current pipeline is running on
+  AzdoOrgUri: '$(System.CollectionUri)'
+  # Azure DevOps project for the build. Defaults to the project the current pipeline is running on
+  AzdoProject: '$(System.TeamProject)'
+
+steps:
+  - task: powershell@2
+    inputs:
+      targetType: 'filePath'
+      filePath: eng/common/retain-build.ps1
+      pwsh: true
+      arguments: >
+        -AzdoOrgUri: ${{parameters.AzdoOrgUri}}
+        -AzdoProject ${{parameters.AzdoProject}}
+        -Token ${{coalesce(parameters.Token, '$env:SYSTEM_ACCESSTOKEN') }}
+        -BuildId ${{coalesce(parameters.BuildId, '$env:BUILD_ID')}}
+    displayName: Enable permanent build retention
+    env:
+      SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+      BUILD_ID: $(Build.BuildId)
\ No newline at end of file
diff --git a/eng/common/templates/steps/send-to-helix.yml b/eng/common/templates/steps/send-to-helix.yml
index 05df886f5..3eb7e2d5f 100644
--- a/eng/common/templates/steps/send-to-helix.yml
+++ b/eng/common/templates/steps/send-to-helix.yml
@@ -3,33 +3,33 @@ parameters:
   HelixSource: 'pr/default' # required -- sources must start with pr/, official/, prodcon/, or agent/
   HelixType: 'tests/default/' # required -- Helix telemetry which identifies what type of data this is; should include "test" for clarity and must end in '/'
   HelixBuild: $(Build.BuildNumber) # required -- the build number Helix will use to identify this -- automatically set to the AzDO build number
-  HelixTargetQueues: '' # required -- semicolon delimited list of Helix queues to test on; see https://helix.dot.net/ for a list of queues
+  HelixTargetQueues: '' # required -- semicolon-delimited list of Helix queues to test on; see https://helix.dot.net/ for a list of queues
   HelixAccessToken: '' # required -- access token to make Helix API requests; should be provided by the appropriate variable group
   HelixConfiguration: '' # optional -- additional property attached to a job
   HelixPreCommands: '' # optional -- commands to run before Helix work item execution
   HelixPostCommands: '' # optional -- commands to run after Helix work item execution
   WorkItemDirectory: '' # optional -- a payload directory to zip up and send to Helix; requires WorkItemCommand; incompatible with XUnitProjects
   WorkItemCommand: '' # optional -- a command to execute on the payload; requires WorkItemDirectory; incompatible with XUnitProjects
-  WorkItemTimeout: '' # optional -- a timeout in seconds for the work item command; requires WorkItemDirectory; incompatible with XUnitProjects
+  WorkItemTimeout: '' # optional -- a timeout, as a TimeSpan.Parse-ready value (e.g. 
00:02:00), for the work item command; requires WorkItemDirectory; incompatible with XUnitProjects
   CorrelationPayloadDirectory: '' # optional -- a directory to zip up and send to Helix as a correlation payload
-  XUnitProjects: '' # optional -- semicolon delimited list of XUnitProjects to parse and send to Helix; requires XUnitRuntimeTargetFramework, XUnitPublishTargetFramework, XUnitRunnerVersion, and IncludeDotNetCli=true
+  XUnitProjects: '' # optional -- semicolon-delimited list of XUnitProjects to parse and send to Helix; requires XUnitRuntimeTargetFramework, XUnitPublishTargetFramework, XUnitRunnerVersion, and IncludeDotNetCli=true
   XUnitWorkItemTimeout: '' # optional -- the work item timeout in seconds for all work items created from the xUnit projects specified by XUnitProjects
   XUnitPublishTargetFramework: '' # optional -- framework to use to publish your xUnit projects
   XUnitRuntimeTargetFramework: '' # optional -- framework to use for the xUnit console runner
   XUnitRunnerVersion: '' # optional -- version of the xUnit nuget package you wish to use on Helix; required for XUnitProjects
   IncludeDotNetCli: false # optional -- true will download a version of the .NET CLI onto the Helix machine as a correlation payload; requires DotNetCliPackageType and DotNetCliVersion
-  DotNetCliPackageType: '' # optional -- either 'sdk' or 'runtime'; determines whether the sdk or runtime will be sent to Helix; see https://raw.githubusercontent.com/dotnet/core/master/release-notes/releases.json
-  DotNetCliVersion: '' # optional -- version of the CLI to send to Helix; based on this: https://raw.githubusercontent.com/dotnet/core/master/release-notes/releases.json
-  EnableXUnitReporter: false # optional -- true enables XUnit result reporting to Mission Control
+  DotNetCliPackageType: '' # optional -- either 'sdk', 'runtime', or 'aspnetcore-runtime'; determines which package type will be sent to Helix; see https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json
+  DotNetCliVersion: '' # optional -- version of the CLI to send to Helix; based on this: https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json
   WaitForWorkItemCompletion: true # optional -- true will make the task wait until work items have completed and fail the build if any work item fails; false is "fire and forget."
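+  # (Illustrative values a consumer might pass; the queue names below are hypothetical examples --
+  # see https://helix.dot.net/ for the real list:
+  #   HelixTargetQueues: 'Windows.10.Amd64.Open;Ubuntu.2204.Amd64.Open'
+  #   WorkItemTimeout: '00:30:00')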
IsExternal: false # [DEPRECATED] -- doesn't do anything, jobs are external if HelixAccessToken is empty and Creator is set + HelixBaseUri: 'https://helix.dot.net/' # optional -- sets the Helix API base URI (allows targeting https://helix.int-dot.net ) Creator: '' # optional -- if the build is external, use this to specify who is sending the job DisplayNamePrefix: 'Run Tests' # optional -- rename the beginning of the displayName of the steps in AzDO condition: succeeded() # optional -- condition for step to execute; defaults to succeeded() continueOnError: false # optional -- determines whether to continue the build if the step errors; defaults to false steps: - - powershell: 'powershell "$env:BUILD_SOURCESDIRECTORY\eng\common\msbuild.ps1 $env:BUILD_SOURCESDIRECTORY\eng\common\helixpublish.proj /restore /t:Test /bl:$env:BUILD_SOURCESDIRECTORY\artifacts\log\$env:BuildConfig\SendToHelix.binlog"' + - powershell: 'powershell "$env:BUILD_SOURCESDIRECTORY\eng\common\msbuild.ps1 $env:BUILD_SOURCESDIRECTORY\eng\common\helixpublish.proj /restore /p:TreatWarningsAsErrors=false /t:Test /bl:$env:BUILD_SOURCESDIRECTORY\artifacts\log\$env:BuildConfig\SendToHelix.binlog"' displayName: ${{ parameters.DisplayNamePrefix }} (Windows) env: BuildConfig: $(_BuildConfig) @@ -53,13 +53,13 @@ steps: IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }} DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }} DotNetCliVersion: ${{ parameters.DotNetCliVersion }} - EnableXUnitReporter: ${{ parameters.EnableXUnitReporter }} WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }} + HelixBaseUri: ${{ parameters.HelixBaseUri }} Creator: ${{ parameters.Creator }} SYSTEM_ACCESSTOKEN: $(System.AccessToken) condition: and(${{ parameters.condition }}, eq(variables['Agent.Os'], 'Windows_NT')) continueOnError: ${{ parameters.continueOnError }} - - script: $BUILD_SOURCESDIRECTORY/eng/common/msbuild.sh $BUILD_SOURCESDIRECTORY/eng/common/helixpublish.proj /restore /t:Test /bl:$BUILD_SOURCESDIRECTORY/artifacts/log/$BuildConfig/SendToHelix.binlog + - script: $BUILD_SOURCESDIRECTORY/eng/common/msbuild.sh $BUILD_SOURCESDIRECTORY/eng/common/helixpublish.proj /restore /p:TreatWarningsAsErrors=false /t:Test /bl:$BUILD_SOURCESDIRECTORY/artifacts/log/$BuildConfig/SendToHelix.binlog displayName: ${{ parameters.DisplayNamePrefix }} (Unix) env: BuildConfig: $(_BuildConfig) @@ -83,8 +83,8 @@ steps: IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }} DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }} DotNetCliVersion: ${{ parameters.DotNetCliVersion }} - EnableXUnitReporter: ${{ parameters.EnableXUnitReporter }} WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }} + HelixBaseUri: ${{ parameters.HelixBaseUri }} Creator: ${{ parameters.Creator }} SYSTEM_ACCESSTOKEN: $(System.AccessToken) condition: and(${{ parameters.condition }}, ne(variables['Agent.Os'], 'Windows_NT')) diff --git a/eng/common/templates/steps/source-build.yml b/eng/common/templates/steps/source-build.yml new file mode 100644 index 000000000..41bbb9157 --- /dev/null +++ b/eng/common/templates/steps/source-build.yml @@ -0,0 +1,129 @@ +parameters: + # This template adds arcade-powered source-build to CI. + + # This is a 'steps' template, and is intended for advanced scenarios where the existing build + # infra has a careful build methodology that must be followed. 
For example, a repo + # (dotnet/runtime) might choose to clone the GitHub repo only once and store it as a pipeline + # artifact for all subsequent jobs to use, to reduce dependence on a strong network connection to + # GitHub. Using this steps template leaves room for that infra to be included. + + # Defines the platform on which to run the steps. See 'eng/common/templates/job/source-build.yml' + # for details. The entire object is described in the 'job' template for simplicity, even though + # the usage of the properties on this object is split between the 'job' and 'steps' templates. + platform: {} + +steps: +# Build. Keep it self-contained for simple reusability. (No source-build-specific job variables.) +- script: | + set -x + df -h + + # If building on the internal project, the artifact feeds variable may be available (usually only if needed) + # In that case, call the feed setup script to add internal feeds corresponding to public ones. + # In addition, add an msbuild argument to copy the WIP from the repo to the target build location. + # This is because SetupNuGetSources.sh will alter the current NuGet.config file, and we need to preserve those + # changes. + internalRestoreArgs= + if [ '$(dn-bot-dnceng-artifact-feeds-rw)' != '$''(dn-bot-dnceng-artifact-feeds-rw)' ]; then + # Temporarily work around https://github.com/dotnet/arcade/issues/7709 + chmod +x $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh + $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh $(Build.SourcesDirectory)/NuGet.config $(dn-bot-dnceng-artifact-feeds-rw) + internalRestoreArgs='/p:CopyWipIntoInnerSourceBuildRepo=true' + + # The 'Copy WIP' feature of source build uses git stash to apply changes from the original repo. + # This only works if there is a username/email configured, which won't be the case in most CI runs. + git config --get user.email + if [ $? -ne 0 ]; then + git config user.email dn-bot@microsoft.com + git config user.name dn-bot + fi + fi + + # If building on the internal project, the internal storage variable may be available (usually only if needed) + # In that case, add variables to allow the download of internal runtimes if the specified versions are not found + # in the default public locations. + internalRuntimeDownloadArgs= + if [ '$(dotnetbuilds-internal-container-read-token-base64)' != '$''(dotnetbuilds-internal-container-read-token-base64)' ]; then + internalRuntimeDownloadArgs='/p:DotNetRuntimeSourceFeed=https://dotnetbuilds.blob.core.windows.net/internal /p:DotNetRuntimeSourceFeedKey=$(dotnetbuilds-internal-container-read-token-base64) --runtimesourcefeed https://dotnetbuilds.blob.core.windows.net/internal --runtimesourcefeedkey $(dotnetbuilds-internal-container-read-token-base64)' + fi + + buildConfig=Release + # Check if AzDO substitutes in a build config from a variable, and use it if so. 
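+  # (The '$''(_BuildConfig)' form splices the literal text "$(_BuildConfig)" together so AzDO
+  # cannot macro-expand it; if the two sides differ, the pipeline substituted a real value.)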
+ if [ '$(_BuildConfig)' != '$''(_BuildConfig)' ]; then + buildConfig='$(_BuildConfig)' + fi + + officialBuildArgs= + if [ '${{ and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}' = 'True' ]; then + officialBuildArgs='/p:DotNetPublishUsingPipelines=true /p:OfficialBuildId=$(BUILD.BUILDNUMBER)' + fi + + targetRidArgs= + if [ '${{ parameters.platform.targetRID }}' != '' ]; then + targetRidArgs='/p:TargetRid=${{ parameters.platform.targetRID }}' + fi + + runtimeOsArgs= + if [ '${{ parameters.platform.runtimeOS }}' != '' ]; then + runtimeOsArgs='/p:RuntimeOS=${{ parameters.platform.runtimeOS }}' + fi + + baseOsArgs= + if [ '${{ parameters.platform.baseOS }}' != '' ]; then + baseOsArgs='/p:BaseOS=${{ parameters.platform.baseOS }}' + fi + + publishArgs= + if [ '${{ parameters.platform.skipPublishValidation }}' != 'true' ]; then + publishArgs='--publish' + fi + + assetManifestFileName=SourceBuild_RidSpecific.xml + if [ '${{ parameters.platform.name }}' != '' ]; then + assetManifestFileName=SourceBuild_${{ parameters.platform.name }}.xml + fi + + ${{ coalesce(parameters.platform.buildScript, './build.sh') }} --ci \ + --configuration $buildConfig \ + --restore --build --pack $publishArgs -bl \ + $officialBuildArgs \ + $internalRuntimeDownloadArgs \ + $internalRestoreArgs \ + $targetRidArgs \ + $runtimeOsArgs \ + $baseOsArgs \ + /p:SourceBuildNonPortable=${{ parameters.platform.nonPortable }} \ + /p:ArcadeBuildFromSource=true \ + /p:AssetManifestFileName=$assetManifestFileName + displayName: Build + +# Upload build logs for diagnosis. +- task: CopyFiles@2 + displayName: Prepare BuildLogs staging directory + inputs: + SourceFolder: '$(Build.SourcesDirectory)' + Contents: | + **/*.log + **/*.binlog + artifacts/source-build/self/prebuilt-report/** + TargetFolder: '$(Build.StagingDirectory)/BuildLogs' + CleanTargetFolder: true + continueOnError: true + condition: succeededOrFailed() + +- task: PublishPipelineArtifact@1 + displayName: Publish BuildLogs + inputs: + targetPath: '$(Build.StagingDirectory)/BuildLogs' + artifactName: BuildLogs_SourceBuild_${{ parameters.platform.name }}_Attempt$(System.JobAttempt) + continueOnError: true + condition: succeededOrFailed() + +# Manually inject component detection so that we can ignore the source build upstream cache, which contains +# a nupkg cache of input packages (a local feed). 
+# This path must match the upstream cache path in property 'CurrentRepoSourceBuiltNupkgCacheDir'
+# in src\Microsoft.DotNet.Arcade.Sdk\tools\SourceBuild\SourceBuildArcade.targets
+- task: ComponentGovernanceComponentDetection@0
+  displayName: Component Detection (Exclude upstream cache)
+  inputs:
+    ignoreDirectories: '$(Build.SourcesDirectory)/artifacts/source-build/self/src/artifacts/obj/source-built-upstream-cache'
diff --git a/eng/common/templates/steps/telemetry-start.yml b/eng/common/templates/steps/telemetry-start.yml
index 32c01ef0b..6abbcb33a 100644
--- a/eng/common/templates/steps/telemetry-start.yml
+++ b/eng/common/templates/steps/telemetry-start.yml
@@ -8,7 +8,7 @@ parameters:
 steps:
 - ${{ if and(eq(parameters.runAsPublic, 'false'), not(eq(variables['System.TeamProject'], 'public'))) }}:
-  - task: AzureKeyVault@1
+  - task: AzureKeyVault@2
     inputs:
       azureSubscription: 'HelixProd_KeyVault'
       KeyVaultName: HelixProdKV
diff --git a/eng/common/templates/variables/pool-providers.yml b/eng/common/templates/variables/pool-providers.yml
new file mode 100644
index 000000000..d236f9fdb
--- /dev/null
+++ b/eng/common/templates/variables/pool-providers.yml
@@ -0,0 +1,57 @@
+# Select a pool provider based on branch name. Anything with a branch name containing 'release' must go into an -Svc pool;
+# otherwise it should go into the "normal" pools. This separates out the queueing and billing of released branches.
+
+# Motivation:
+#   Once a given branch of a repository's output has been officially "shipped" once, it is then considered to be COGS
+#   (Cost of goods sold) and should be moved to a servicing pool provider. This allows both separation of queueing
+#   (allowing release builds and main PR builds to not interfere with each other) and billing (required for COGS).
+#   Additionally, the pool provider name itself may be subject to change when the .NET Core Engineering Services
+#   team needs to move resources around and create new and potentially differently-named pools. Using this template
+#   file from an Arcade-ified repo guards against both: having to hand-edit release/* branches, and the pool renames themselves.
+
+# How to use:
+#   This yaml assumes your shipped product branches use the naming convention "release/..." (which many do).
+#   If alternate naming conventions come into broad usage, they can be added to the condition below.
+#
+#   First, import the template in an arcade-ified repo to pick up the variables, e.g.:
+#
+#     variables:
+#     - template: /eng/common/templates/variables/pool-providers.yml
+#
+#   ... 
then, anywhere a pool provider is specified, use the runtime variables,
+#   $(DncEngInternalBuildPool) and $(DncEngPublicBuildPool), e.g.:
+#
+#     pool:
+#       name: $(DncEngInternalBuildPool)
+#       demands: ImageOverride -equals windows.vs2019.amd64
+
+variables:
+  # Coalesce the target and source branches so we know when a PR targets a release branch.
+  # If these variables are somehow missing, fall back to main (tends to have more capacity).
+
+  # Any new -Svc alternative pools should have variables added here to allow for splitting work
+  - name: DncEngPublicBuildPool
+    value: $[
+      replace(
+        replace(
+          eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'),
+          True,
+          'NetCore-Svc-Public'
+        ),
+        False,
+        'NetCore-Public'
+      )
+    ]
+
+  - name: DncEngInternalBuildPool
+    value: $[
+      replace(
+        replace(
+          eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'),
+          True,
+          'NetCore1ESPool-Svc-Internal'
+        ),
+        False,
+        'NetCore1ESPool-Internal'
+      )
+    ]
diff --git a/eng/common/templates/variables/sdl-variables.yml b/eng/common/templates/variables/sdl-variables.yml
new file mode 100644
index 000000000..dbdd66d4a
--- /dev/null
+++ b/eng/common/templates/variables/sdl-variables.yml
@@ -0,0 +1,7 @@
+variables:
+# The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in
+# sync with the packages.config file.
+- name: DefaultGuardianVersion
+  value: 0.109.0
+- name: GuardianPackagesConfigFile
+  value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config
\ No newline at end of file
diff --git a/eng/common/tools.ps1 b/eng/common/tools.ps1
index f50507a06..60352ede1 100644
--- a/eng/common/tools.ps1
+++ b/eng/common/tools.ps1
@@ -5,11 +5,13 @@
 [bool]$ci = if (Test-Path variable:ci) { $ci } else { $false }

 # Build configuration. Common values include 'Debug' and 'Release', but the repository may use other names.
-[string]$configuration = if (Test-Path variable:configuration) { $configuration } else { "Debug" }
+[string]$configuration = if (Test-Path variable:configuration) { $configuration } else { 'Debug' }
+
+# Set to true to opt out of outputting binary log while running in CI
+[bool]$excludeCIBinarylog = if (Test-Path variable:excludeCIBinarylog) { $excludeCIBinarylog } else { $false }

 # Set to true to output binary log from msbuild. Note that emitting binary log slows down the build.
-# Binary log must be enabled on CI.
-[bool]$binaryLog = if (Test-Path variable:binaryLog) { $binaryLog } else { $ci }
+[bool]$binaryLog = if (Test-Path variable:binaryLog) { $binaryLog } else { $ci -and !$excludeCIBinarylog }

 # Set to true to use the pipelines logger which will enable Azure logging output.
 # https://github.com/Microsoft/azure-pipelines-tasks/blob/master/docs/authoring/commands.md
@@ -24,7 +26,7 @@
 [bool]$restore = if (Test-Path variable:restore) { $restore } else { $true }

 # Adjusts msbuild verbosity level.
-[string]$verbosity = if (Test-Path variable:verbosity) { $verbosity } else { "minimal" }
+[string]$verbosity = if (Test-Path variable:verbosity) { $verbosity } else { 'minimal' }

 # Set to true to reuse msbuild nodes. Recommended to not reuse on CI.
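+# (Likely rationale: long-lived MSBuild worker nodes can outlive a CI job and keep file locks
+# on the agent, so reuse is recommended only for local dev builds.)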
[bool]$nodeReuse = if (Test-Path variable:nodeReuse) { $nodeReuse } else { !$ci }

@@ -40,23 +42,31 @@
 [bool]$useInstalledDotNetCli = if (Test-Path variable:useInstalledDotNetCli) { $useInstalledDotNetCli } else { $true }

 # Enable repos to use a particular version of the on-line dotnet-install scripts.
-# default URL: https://dot.net/v1/dotnet-install.ps1
-[string]$dotnetInstallScriptVersion = if (Test-Path variable:dotnetInstallScriptVersion) { $dotnetInstallScriptVersion } else { "v1" }
+# default URL: https://dotnet.microsoft.com/download/dotnet/scripts/v1/dotnet-install.ps1
+[string]$dotnetInstallScriptVersion = if (Test-Path variable:dotnetInstallScriptVersion) { $dotnetInstallScriptVersion } else { 'v1' }

 # True to use global NuGet cache instead of restoring packages to repository-local directory.
 [bool]$useGlobalNuGetCache = if (Test-Path variable:useGlobalNuGetCache) { $useGlobalNuGetCache } else { !$ci }

+# True to exclude prerelease versions of Visual Studio during the build
+[bool]$excludePrereleaseVS = if (Test-Path variable:excludePrereleaseVS) { $excludePrereleaseVS } else { $false }
+
 # An array of names of processes to stop on script exit if prepareMachine is true.
-$processesToStopOnExit = if (Test-Path variable:processesToStopOnExit) { $processesToStopOnExit } else { @("msbuild", "dotnet", "vbcscompiler") }
+$processesToStopOnExit = if (Test-Path variable:processesToStopOnExit) { $processesToStopOnExit } else { @('msbuild', 'dotnet', 'vbcscompiler') }
+
+$disableConfigureToolsetImport = if (Test-Path variable:disableConfigureToolsetImport) { $disableConfigureToolsetImport } else { $null }

 set-strictmode -version 2.0
-$ErrorActionPreference = "Stop"
+$ErrorActionPreference = 'Stop'
 [Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12

-function Create-Directory([string[]] $path) {
-  if (!(Test-Path $path)) {
-    New-Item -path $path -force -itemType "Directory" | Out-Null
-  }
+# If specified, provides an alternate path for getting .NET Core SDKs and Runtimes. This script will still try public sources first.
+[string]$runtimeSourceFeed = if (Test-Path variable:runtimeSourceFeed) { $runtimeSourceFeed } else { $null }
+# Base-64 encoded SAS token that has permission to the storage container described by $runtimeSourceFeed
+[string]$runtimeSourceFeedKey = if (Test-Path variable:runtimeSourceFeedKey) { $runtimeSourceFeedKey } else { $null }
+
+function Create-Directory ([string[]] $path) {
+  New-Item -Path $path -Force -ItemType 'Directory' | Out-Null
 }

 function Unzip([string]$zipfile, [string]$outpath) {
@@ -96,7 +106,50 @@ function Exec-Process([string]$command, [string]$commandArgs) {
   }
 }

-function InitializeDotNetCli([bool]$install) {
+# Take the given block, print it, print what the block probably references from the current set of
+# variables using low-effort string matching, then run the block.
+#
+# This is intended to replace the pattern of manually copy-pasting a command, wrapping it in quotes,
+# and printing it using "Write-Host". The copy-paste method is more readable in build logs, but less
+# maintainable and less reliable. It is easy to make a mistake and modify the command without
+# properly updating the "Write-Host" line, resulting in misleading build logs. The probability of
+# this mistake makes the pattern hard to trust when it shows up in build logs. Finding the bug in
+# existing source code can also be difficult, because the strings are not aligned to each other and
+# the line may be 300+ columns long. 
+#
+# By removing the need to maintain two copies of the command, Exec-BlockVerbosely avoids the issues.
+#
+# In Bash (or any POSIX-like shell), "set -x" prints usable verbose output automatically.
+# "Set-PSDebug" appears to be similar at first glance, but unfortunately, it isn't very useful: it
+# doesn't print any info about the variables being used by the command, which is normally the
+# interesting part to diagnose.
+function Exec-BlockVerbosely([scriptblock] $block) {
+  Write-Host "--- Running script block:"
+  $blockString = $block.ToString().Trim()
+  Write-Host $blockString
+
+  Write-Host "--- List of variables that might be used:"
+  # For each variable x in the environment, check the block for a reference to x via simple "$x" or
+  # "@x" syntax. This doesn't detect other ways to reference variables ("${x}" nor "$variable:x",
+  # among others). It only catches what this function was originally written for: simple
+  # command-line commands.
+  $variableTable = Get-Variable |
+    Where-Object {
+      $blockString.Contains("`$$($_.Name)") -or $blockString.Contains("@$($_.Name)")
+    } |
+    Format-Table -AutoSize -HideTableHeaders -Wrap |
+    Out-String
+  Write-Host $variableTable.Trim()
+
+  Write-Host "--- Executing:"
+  & $block
+  Write-Host "--- Done running script block!"
+}
+
+# The createSdkLocationFile parameter enables generating a file under the toolset directory into
+# which the SDK's location is written. This is only necessary for cmd --> powershell invocations,
+# as dot sourcing isn't possible.
+function InitializeDotNetCli([bool]$install, [bool]$createSdkLocationFile) {
   if (Test-Path variable:global:_DotNetInstallDir) {
     return $global:_DotNetInstallDir
   }
@@ -131,16 +184,16 @@ function InitializeDotNetCli([bool]$install) {

   # Use dotnet installation specified in DOTNET_INSTALL_DIR if it contains the required SDK version,
   # otherwise install the dotnet CLI and SDK to repo local .dotnet directory to avoid potential permission issues.
-  if ((-not $globalJsonHasRuntimes) -and ($env:DOTNET_INSTALL_DIR -ne $null) -and (Test-Path(Join-Path $env:DOTNET_INSTALL_DIR "sdk\$dotnetSdkVersion"))) {
+  if ((-not $globalJsonHasRuntimes) -and (-not [string]::IsNullOrEmpty($env:DOTNET_INSTALL_DIR)) -and (Test-Path(Join-Path $env:DOTNET_INSTALL_DIR "sdk\$dotnetSdkVersion"))) {
     $dotnetRoot = $env:DOTNET_INSTALL_DIR
   } else {
-    $dotnetRoot = Join-Path $RepoRoot ".dotnet"
+    $dotnetRoot = Join-Path $RepoRoot '.dotnet'

     if (-not (Test-Path(Join-Path $dotnetRoot "sdk\$dotnetSdkVersion"))) {
       if ($install) {
         InstallDotNetSdk $dotnetRoot $dotnetSdkVersion
       } else {
-        Write-PipelineTelemetryError -Category "InitializeToolset" -Message "Unable to find dotnet with SDK version '$dotnetSdkVersion'"
+        Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Unable to find dotnet with SDK version '$dotnetSdkVersion'"
         ExitWithExitCode 1
       }
     }
@@ -148,6 +201,24 @@ function InitializeDotNetCli([bool]$install) {
     $env:DOTNET_INSTALL_DIR = $dotnetRoot
   }

+  # Creates a temporary file under the toolset dir.
+  # The following code block protects against concurrent access so that this function can
+  # be called in parallel. 
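+  # (Illustrative: sdk.txt ends up holding a single line with the dotnet root path, e.g. 'D:\repo\.dotnet'.
+  # Writing to a random temp file and renaming with Move-Item -Force keeps concurrent callers safe:
+  # the loser of the race lands in the catch block below and simply deletes its temp file.)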
+  if ($createSdkLocationFile) {
+    do {
+      $sdkCacheFileTemp = Join-Path $ToolsetDir $([System.IO.Path]::GetRandomFileName())
+    }
+    until (!(Test-Path $sdkCacheFileTemp))
+    Set-Content -Path $sdkCacheFileTemp -Value $dotnetRoot
+
+    try {
+      Move-Item -Force $sdkCacheFileTemp (Join-Path $ToolsetDir 'sdk.txt')
+    } catch {
+      # Somebody beat us to it; discard our temp file
+      Remove-Item -Path $sdkCacheFileTemp
+    }
+  }
+
   # Add dotnet to PATH. This prevents any bare invocation of dotnet in custom
   # build steps from using anything other than what we've downloaded.
   # It also ensures that VS msbuild will use the downloaded sdk targets.
@@ -156,43 +227,84 @@ function InitializeDotNetCli([bool]$install) {
   # Make sure that our bootstrapped dotnet cli is available in future steps of the Azure Pipelines build
   Write-PipelinePrependPath -Path $dotnetRoot

-  # Work around issues with Azure Artifacts credential provider
-  # https://github.com/dotnet/arcade/issues/3932
-  if ($ci) {
-    $env:NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS = 20
-    $env:NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS = 20
-    Write-PipelineSetVariable -Name 'NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS' -Value '20'
-    Write-PipelineSetVariable -Name 'NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS' -Value '20'
-  }
-
   Write-PipelineSetVariable -Name 'DOTNET_MULTILEVEL_LOOKUP' -Value '0'
   Write-PipelineSetVariable -Name 'DOTNET_SKIP_FIRST_TIME_EXPERIENCE' -Value '1'

   return $global:_DotNetInstallDir = $dotnetRoot
 }

+function Retry($downloadBlock, $maxRetries = 5) {
+  $retries = 1
+
+  while ($true) {
+    try {
+      & $downloadBlock
+      break
+    }
+    catch {
+      Write-PipelineTelemetryError -Category 'InitializeToolset' -Message $_
+    }
+
+    if (++$retries -le $maxRetries) {
+      $delayInSeconds = [math]::Pow(2, $retries) - 1 # Exponential backoff
+      Write-Host "Retrying. Waiting for $delayInSeconds seconds before next attempt ($retries of $maxRetries)."
+      Start-Sleep -Seconds $delayInSeconds
+    }
+    else {
+      Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Unable to download file in $maxRetries attempts." 
+ break + } + + } +} + function GetDotNetInstallScript([string] $dotnetRoot) { - $installScript = Join-Path $dotnetRoot "dotnet-install.ps1" + $installScript = Join-Path $dotnetRoot 'dotnet-install.ps1' if (!(Test-Path $installScript)) { Create-Directory $dotnetRoot $ProgressPreference = 'SilentlyContinue' # Don't display the console progress UI - it's a huge perf hit - Invoke-WebRequest "https://dot.net/$dotnetInstallScriptVersion/dotnet-install.ps1" -OutFile $installScript + $uri = "https://dotnet.microsoft.com/download/dotnet/scripts/$dotnetInstallScriptVersion/dotnet-install.ps1" + + Retry({ + Write-Host "GET $uri" + Invoke-WebRequest $uri -OutFile $installScript + }) } return $installScript } -function InstallDotNetSdk([string] $dotnetRoot, [string] $version, [string] $architecture = "") { - InstallDotNet $dotnetRoot $version $architecture +function InstallDotNetSdk([string] $dotnetRoot, [string] $version, [string] $architecture = '', [switch] $noPath) { + InstallDotNet $dotnetRoot $version $architecture '' $false $runtimeSourceFeed $runtimeSourceFeedKey -noPath:$noPath } -function InstallDotNet([string] $dotnetRoot, - [string] $version, - [string] $architecture = "", - [string] $runtime = "", - [bool] $skipNonVersionedFiles = $false, - [string] $runtimeSourceFeed = "", - [string] $runtimeSourceFeedKey = "") { +function InstallDotNet([string] $dotnetRoot, + [string] $version, + [string] $architecture = '', + [string] $runtime = '', + [bool] $skipNonVersionedFiles = $false, + [string] $runtimeSourceFeed = '', + [string] $runtimeSourceFeedKey = '', + [switch] $noPath) { + + $dotnetVersionLabel = "'sdk v$version'" + + if ($runtime -ne '' -and $runtime -ne 'sdk') { + $runtimePath = $dotnetRoot + $runtimePath = $runtimePath + "\shared" + if ($runtime -eq "dotnet") { $runtimePath = $runtimePath + "\Microsoft.NETCore.App" } + if ($runtime -eq "aspnetcore") { $runtimePath = $runtimePath + "\Microsoft.AspNetCore.App" } + if ($runtime -eq "windowsdesktop") { $runtimePath = $runtimePath + "\Microsoft.WindowsDesktop.App" } + $runtimePath = $runtimePath + "\" + $version + + $dotnetVersionLabel = "runtime toolset '$runtime/$architecture v$version'" + + if (Test-Path $runtimePath) { + Write-Host " Runtime toolset '$runtime/$architecture v$version' already installed." + $installSuccess = $true + Exit + } + } $installScript = GetDotNetInstallScript $dotnetRoot $installParameters = @{ @@ -203,33 +315,46 @@ function InstallDotNet([string] $dotnetRoot, if ($architecture) { $installParameters.Architecture = $architecture } if ($runtime) { $installParameters.Runtime = $runtime } if ($skipNonVersionedFiles) { $installParameters.SkipNonVersionedFiles = $skipNonVersionedFiles } - - try { - & $installScript @installParameters + if ($noPath) { $installParameters.NoPath = $True } + + $variations = @() + $variations += @($installParameters) + + $dotnetBuilds = $installParameters.Clone() + $dotnetbuilds.AzureFeed = "https://ci.dot.net/public" + $variations += @($dotnetBuilds) + + if ($runtimeSourceFeed) { + $runtimeSource = $installParameters.Clone() + $runtimeSource.AzureFeed = $runtimeSourceFeed + if ($runtimeSourceFeedKey) { + $decodedBytes = [System.Convert]::FromBase64String($runtimeSourceFeedKey) + $decodedString = [System.Text.Encoding]::UTF8.GetString($decodedBytes) + $runtimeSource.FeedCredential = $decodedString + } + $variations += @($runtimeSource) } - catch { - Write-PipelineTelemetryError -Category "InitializeToolset" -Message "Failed to install dotnet runtime '$runtime' from public location." 
- - # Only the runtime can be installed from a custom [private] location. - if ($runtime -and ($runtimeSourceFeed -or $runtimeSourceFeedKey)) { - if ($runtimeSourceFeed) { $installParameters.AzureFeed = $runtimeSourceFeed } - if ($runtimeSourceFeedKey) { - $decodedBytes = [System.Convert]::FromBase64String($runtimeSourceFeedKey) - $decodedString = [System.Text.Encoding]::UTF8.GetString($decodedBytes) - $installParameters.FeedCredential = $decodedString - } - - try { - & $installScript @installParameters - } - catch { - Write-PipelineTelemetryError -Category "InitializeToolset" -Message "Failed to install dotnet runtime '$runtime' from custom location '$runtimeSourceFeed'." - ExitWithExitCode 1 - } + $installSuccess = $false + foreach ($variation in $variations) { + if ($variation | Get-Member AzureFeed) { + $location = $variation.AzureFeed } else { - ExitWithExitCode 1 + $location = "public location"; } + Write-Host " Attempting to install $dotnetVersionLabel from $location." + try { + & $installScript @variation + $installSuccess = $true + break + } + catch { + Write-Host " Failed to install $dotnetVersionLabel from $location." + } + } + if (-not $installSuccess) { + Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Failed to install $dotnetVersionLabel from any of the specified locations." + ExitWithExitCode 1 } } @@ -253,17 +378,33 @@ function InitializeVisualStudioMSBuild([bool]$install, [object]$vsRequirements = return $global:_MSBuildExe } - if (!$vsRequirements) { $vsRequirements = $GlobalJson.tools.vs } - $vsMinVersionStr = if ($vsRequirements.version) { $vsRequirements.version } else { "15.9" } + # Minimum VS version to require. + $vsMinVersionReqdStr = '17.7' + $vsMinVersionReqd = [Version]::new($vsMinVersionReqdStr) + + # If the version of msbuild is going to be xcopied, + # use this version. Version matches a package here: + # https://dev.azure.com/dnceng/public/_artifacts/feed/dotnet-eng/NuGet/RoslynTools.MSBuild/versions/17.8.1-2 + $defaultXCopyMSBuildVersion = '17.8.1-2' + + if (!$vsRequirements) { + if (Get-Member -InputObject $GlobalJson.tools -Name 'vs') { + $vsRequirements = $GlobalJson.tools.vs + } + else { + $vsRequirements = New-Object PSObject -Property @{ version = $vsMinVersionReqdStr } + } + } + $vsMinVersionStr = if ($vsRequirements.version) { $vsRequirements.version } else { $vsMinVersionReqdStr } $vsMinVersion = [Version]::new($vsMinVersionStr) # Try msbuild command available in the environment. if ($env:VSINSTALLDIR -ne $null) { - $msbuildCmd = Get-Command "msbuild.exe" -ErrorAction SilentlyContinue + $msbuildCmd = Get-Command 'msbuild.exe' -ErrorAction SilentlyContinue if ($msbuildCmd -ne $null) { # Workaround for https://github.com/dotnet/roslyn/issues/35793 # Due to this issue $msbuildCmd.Version returns 0.0.0.0 for msbuild.exe 16.2+ - $msbuildVersion = [Version]::new((Get-Item $msbuildCmd.Path).VersionInfo.ProductVersion.Split(@('-', '+'))[0]) + $msbuildVersion = [Version]::new((Get-Item $msbuildCmd.Path).VersionInfo.ProductVersion.Split([char[]]@('-', '+'))[0]) if ($msbuildVersion -ge $vsMinVersion) { return $global:_MSBuildExe = $msbuildCmd.Path @@ -277,28 +418,57 @@ function InitializeVisualStudioMSBuild([bool]$install, [object]$vsRequirements = # Locate Visual Studio installation or download x-copy msbuild. 
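+  # (Assumed flow, per the code below: prefer a VS installation found via vswhere; otherwise fall
+  # back to downloading an xcopy-able RoslynTools.MSBuild package from the dotnet-eng feed.)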
$vsInfo = LocateVisualStudio $vsRequirements
   if ($vsInfo -ne $null) {
-    $vsInstallDir = $vsInfo.installationPath
+    # Ensure vsInstallDir has a trailing slash
+    $vsInstallDir = Join-Path $vsInfo.installationPath "\"
     $vsMajorVersion = $vsInfo.installationVersion.Split('.')[0]

     InitializeVisualStudioEnvironmentVariables $vsInstallDir $vsMajorVersion
   } else {

-    if (Get-Member -InputObject $GlobalJson.tools -Name "xcopy-msbuild") {
+    if (Get-Member -InputObject $GlobalJson.tools -Name 'xcopy-msbuild') {
      $xcopyMSBuildVersion = $GlobalJson.tools.'xcopy-msbuild'
      $vsMajorVersion = $xcopyMSBuildVersion.Split('.')[0]
    }
    else {
-      $vsMajorVersion = $vsMinVersion.Major
-      $xcopyMSBuildVersion = "$vsMajorVersion.$($vsMinVersion.Minor).0-alpha"
+      # If the VS version provided in global.json is incompatible (too low), use the default version for the xcopy msbuild download
+      if ($vsMinVersion -lt $vsMinVersionReqd) {
+        Write-Host "Using xcopy-msbuild version of $defaultXCopyMSBuildVersion since VS version $vsMinVersionStr provided in global.json is not compatible"
+        $xcopyMSBuildVersion = $defaultXCopyMSBuildVersion
+        $vsMajorVersion = $xcopyMSBuildVersion.Split('.')[0]
+      }
+      else {
+        # If the VS version IS compatible, look for an xcopy msbuild package
+        # with a version matching VS.
+        # Note: If this version does not exist, then an explicit version of xcopy msbuild
+        # can be specified in global.json. This will be required for pre-release versions of msbuild.
+        $vsMajorVersion = $vsMinVersion.Major
+        $vsMinorVersion = $vsMinVersion.Minor
+        $xcopyMSBuildVersion = "$vsMajorVersion.$vsMinorVersion.0"
+      }
    }

-    $vsInstallDir = InitializeXCopyMSBuild $xcopyMSBuildVersion $install
+    $vsInstallDir = $null
+    if ($xcopyMSBuildVersion.Trim() -ine "none") {
+      $vsInstallDir = InitializeXCopyMSBuild $xcopyMSBuildVersion $install
+      if ($vsInstallDir -eq $null) {
+        throw "Could not xcopy msbuild. Please check that package 'RoslynTools.MSBuild @ $xcopyMSBuildVersion' exists on feed 'dotnet-eng'." 
+ } + } if ($vsInstallDir -eq $null) { - throw "Unable to find Visual Studio that has required version and components installed" + throw 'Unable to find Visual Studio that has required version and components installed' } } $msbuildVersionDir = if ([int]$vsMajorVersion -lt 16) { "$vsMajorVersion.0" } else { "Current" } - return $global:_MSBuildExe = Join-Path $vsInstallDir "MSBuild\$msbuildVersionDir\Bin\msbuild.exe" + + $local:BinFolder = Join-Path $vsInstallDir "MSBuild\$msbuildVersionDir\Bin" + $local:Prefer64bit = if (Get-Member -InputObject $vsRequirements -Name 'Prefer64bit') { $vsRequirements.Prefer64bit } else { $false } + if ($local:Prefer64bit -and (Test-Path(Join-Path $local:BinFolder "amd64"))) { + $global:_MSBuildExe = Join-Path $local:BinFolder "amd64\msbuild.exe" + } else { + $global:_MSBuildExe = Join-Path $local:BinFolder "msbuild.exe" + } + + return $global:_MSBuildExe } function InitializeVisualStudioEnvironmentVariables([string] $vsInstallDir, [string] $vsMajorVersion) { @@ -317,7 +487,7 @@ function InstallXCopyMSBuild([string]$packageVersion) { } function InitializeXCopyMSBuild([string]$packageVersion, [bool]$install) { - $packageName = "RoslynTools.MSBuild" + $packageName = 'RoslynTools.MSBuild' $packageDir = Join-Path $ToolsDir "msbuild\$packageVersion" $packagePath = Join-Path $packageDir "$packageName.$packageVersion.nupkg" @@ -327,13 +497,17 @@ function InitializeXCopyMSBuild([string]$packageVersion, [bool]$install) { } Create-Directory $packageDir + Write-Host "Downloading $packageName $packageVersion" $ProgressPreference = 'SilentlyContinue' # Don't display the console progress UI - it's a huge perf hit - Invoke-WebRequest "https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/flat2/$packageName/$packageVersion/$packageName.$packageVersion.nupkg" -OutFile $packagePath + Retry({ + Invoke-WebRequest "https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/flat2/$packageName/$packageVersion/$packageName.$packageVersion.nupkg" -OutFile $packagePath + }) + Unzip $packagePath $packageDir } - return Join-Path $packageDir "tools" + return Join-Path $packageDir 'tools' } # @@ -357,29 +531,35 @@ function LocateVisualStudio([object]$vsRequirements = $null){ if (Get-Member -InputObject $GlobalJson.tools -Name 'vswhere') { $vswhereVersion = $GlobalJson.tools.vswhere } else { - $vswhereVersion = "2.5.2" + $vswhereVersion = '2.5.2' } $vsWhereDir = Join-Path $ToolsDir "vswhere\$vswhereVersion" - $vsWhereExe = Join-Path $vsWhereDir "vswhere.exe" + $vsWhereExe = Join-Path $vsWhereDir 'vswhere.exe' if (!(Test-Path $vsWhereExe)) { Create-Directory $vsWhereDir - Write-Host "Downloading vswhere" - Invoke-WebRequest "https://github.com/Microsoft/vswhere/releases/download/$vswhereVersion/vswhere.exe" -OutFile $vswhereExe + Write-Host 'Downloading vswhere' + Retry({ + Invoke-WebRequest "https://netcorenativeassets.blob.core.windows.net/resource-packages/external/windows/vswhere/$vswhereVersion/vswhere.exe" -OutFile $vswhereExe + }) } if (!$vsRequirements) { $vsRequirements = $GlobalJson.tools.vs } - $args = @("-latest", "-prerelease", "-format", "json", "-requires", "Microsoft.Component.MSBuild", "-products", "*") + $args = @('-latest', '-format', 'json', '-requires', 'Microsoft.Component.MSBuild', '-products', '*') - if (Get-Member -InputObject $vsRequirements -Name "version") { - $args += "-version" + if (!$excludePrereleaseVS) { + $args += '-prerelease' + } + + if (Get-Member -InputObject $vsRequirements -Name 'version') { + $args += '-version' $args 
+= $vsRequirements.version } - if (Get-Member -InputObject $vsRequirements -Name "components") { + if (Get-Member -InputObject $vsRequirements -Name 'components') { foreach ($component in $vsRequirements.components) { - $args += "-requires" + $args += '-requires' $args += $component } } @@ -396,7 +576,13 @@ function LocateVisualStudio([object]$vsRequirements = $null){ function InitializeBuildTool() { if (Test-Path variable:global:_BuildTool) { - return $global:_BuildTool + # If the requested msbuild parameters do not match, clear the cached variables. + if($global:_BuildTool.Contains('ExcludePrereleaseVS') -and $global:_BuildTool.ExcludePrereleaseVS -ne $excludePrereleaseVS) { + Remove-Item variable:global:_BuildTool + Remove-Item variable:global:_MSBuildExe + } else { + return $global:_BuildTool + } } if (-not $msbuildEngine) { @@ -405,28 +591,36 @@ function InitializeBuildTool() { # Initialize dotnet cli if listed in 'tools' $dotnetRoot = $null - if (Get-Member -InputObject $GlobalJson.tools -Name "dotnet") { + if (Get-Member -InputObject $GlobalJson.tools -Name 'dotnet') { $dotnetRoot = InitializeDotNetCli -install:$restore } - if ($msbuildEngine -eq "dotnet") { + if ($msbuildEngine -eq 'dotnet') { if (!$dotnetRoot) { - Write-PipelineTelemetryError -Category "InitializeToolset" -Message "/global.json must specify 'tools.dotnet'." + Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "/global.json must specify 'tools.dotnet'." ExitWithExitCode 1 } $dotnetPath = Join-Path $dotnetRoot (GetExecutableFileName 'dotnet') - $buildTool = @{ Path = $dotnetPath; Command = 'msbuild'; Tool = 'dotnet'; Framework = 'netcoreapp2.1' } + + # Use override if it exists - commonly set by source-build + if ($null -eq $env:_OverrideArcadeInitializeBuildToolFramework) { + $initializeBuildToolFramework="net8.0" + } else { + $initializeBuildToolFramework=$env:_OverrideArcadeInitializeBuildToolFramework + } + + $buildTool = @{ Path = $dotnetPath; Command = 'msbuild'; Tool = 'dotnet'; Framework = $initializeBuildToolFramework } } elseif ($msbuildEngine -eq "vs") { try { $msbuildPath = InitializeVisualStudioMSBuild -install:$restore } catch { - Write-PipelineTelemetryError -Category "InitializeToolset" -Message $_ + Write-PipelineTelemetryError -Category 'InitializeToolset' -Message $_ ExitWithExitCode 1 } - $buildTool = @{ Path = $msbuildPath; Command = ""; Tool = "vs"; Framework = "net472" } + $buildTool = @{ Path = $msbuildPath; Command = ""; Tool = "vs"; Framework = "net472"; ExcludePrereleaseVS = $excludePrereleaseVS } } else { - Write-PipelineTelemetryError -Category "InitializeToolset" -Message "Unexpected value of -msbuildEngine: '$msbuildEngine'." + Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Unexpected value of -msbuildEngine: '$msbuildEngine'." ExitWithExitCode 1 } @@ -435,26 +629,29 @@ function InitializeBuildTool() { function GetDefaultMSBuildEngine() { # Presence of tools.vs indicates the repo needs to build using VS msbuild on Windows. - if (Get-Member -InputObject $GlobalJson.tools -Name "vs") { - return "vs" + if (Get-Member -InputObject $GlobalJson.tools -Name 'vs') { + return 'vs' } - if (Get-Member -InputObject $GlobalJson.tools -Name "dotnet") { - return "dotnet" + if (Get-Member -InputObject $GlobalJson.tools -Name 'dotnet') { + return 'dotnet' } - Write-PipelineTelemetryError -Category "InitializeToolset" -Message "-msbuildEngine must be specified, or /global.json must specify 'tools.dotnet' or 'tools.vs'." 
+  Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "-msbuildEngine must be specified, or /global.json must specify 'tools.dotnet' or 'tools.vs'."
   ExitWithExitCode 1
 }

 function GetNuGetPackageCachePath() {
   if ($env:NUGET_PACKAGES -eq $null) {
-    # Use local cache on CI to ensure deterministic build,
+    # Use local cache on CI to ensure deterministic builds.
+    # Avoid using the HTTP cache as a workaround for https://github.com/NuGet/Home/issues/3116
     # Use global cache in dev builds to avoid the cost of downloading packages.
+    # For directory normalization, see also: https://github.com/NuGet/Home/issues/7968
     if ($useGlobalNuGetCache) {
-      $env:NUGET_PACKAGES = Join-Path $env:UserProfile ".nuget\packages"
+      $env:NUGET_PACKAGES = Join-Path $env:UserProfile '.nuget\packages\'
     } else {
-      $env:NUGET_PACKAGES = Join-Path $RepoRoot ".packages"
+      $env:NUGET_PACKAGES = Join-Path $RepoRoot '.packages\'
+      $env:RESTORENOCACHE = $true
     }
   }

@@ -467,17 +664,25 @@ function GetSdkTaskProject([string]$taskName) {
 }

 function InitializeNativeTools() {
-  if (Get-Member -InputObject $GlobalJson -Name "native-tools") {
+  if (-Not (Test-Path variable:DisableNativeToolsetInstalls) -And (Get-Member -InputObject $GlobalJson -Name "native-tools")) {
     $nativeArgs = @{}
     if ($ci) {
       $nativeArgs = @{
         InstallDirectory = "$ToolsDir"
       }
     }
+    if ($env:NativeToolsOnMachine) {
+      Write-Host "Variable NativeToolsOnMachine detected, enabling native tool path promotion..."
+      $nativeArgs += @{ PathPromotion = $true }
+    }
     & "$PSScriptRoot/init-tools-native.ps1" @nativeArgs
   }
 }

+function Read-ArcadeSdkVersion() {
+  return $GlobalJson.'msbuild-sdks'.'Microsoft.DotNet.Arcade.Sdk'
+}
+
 function InitializeToolset() {
   if (Test-Path variable:global:_ToolsetBuildProj) {
     return $global:_ToolsetBuildProj
@@ -485,7 +690,7 @@ function InitializeToolset() {

   $nugetCache = GetNuGetPackageCachePath

-  $toolsetVersion = $GlobalJson.'msbuild-sdks'.'Microsoft.DotNet.Arcade.Sdk'
+  $toolsetVersion = Read-ArcadeSdkVersion
   $toolsetLocationFile = Join-Path $ToolsetDir "$toolsetVersion.txt"

   if (Test-Path $toolsetLocationFile) {
@@ -496,20 +701,20 @@ function InitializeToolset() {
   }

   if (-not $restore) {
-    Write-PipelineTelemetryError -Category "InitializeToolset" -Message "Toolset version $toolsetVersion has not been restored."
+    Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Toolset version $toolsetVersion has not been restored."
     ExitWithExitCode 1
   }

   $buildTool = InitializeBuildTool

-  $proj = Join-Path $ToolsetDir "restore.proj"
-  $bl = if ($binaryLog) { "/bl:" + (Join-Path $LogDir "ToolsetRestore.binlog") } else { "" }
+  $proj = Join-Path $ToolsetDir 'restore.proj'
+  $bl = if ($binaryLog) { '/bl:' + (Join-Path $LogDir 'ToolsetRestore.binlog') } else { '' }

   '<Project Sdk="Microsoft.DotNet.Arcade.Sdk"/>' | Set-Content $proj

   MSBuild-Core $proj $bl /t:__WriteToolsetLocation /clp:ErrorsOnly`;NoSummary /p:__ToolsetLocationOutputFile=$toolsetLocationFile

-  $path = Get-Content $toolsetLocationFile -TotalCount 1
+  $path = Get-Content $toolsetLocationFile -Encoding UTF8 -TotalCount 1
   if (!(Test-Path $path)) {
     throw "Invalid toolset path: $path"
   }
@@ -524,8 +729,19 @@ function ExitWithExitCode([int] $exitCode) {
   exit $exitCode
 }

+# Check if $LASTEXITCODE is a nonzero exit code (NZEC). If so, print an Azure Pipelines error for
+# diagnostics, then exit the script with $LASTEXITCODE.
+function Exit-IfNZEC([string] $category = "General") {
+  Write-Host "Exit code $LASTEXITCODE"
+  if ($LASTEXITCODE -ne 0) {
+    $message = "Last command failed with exit code $LASTEXITCODE." 
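+    # (Assumption: the -Force switch makes the logging helper in pipeline-logging-functions.ps1
+    # emit the error even when telemetry output would otherwise be suppressed.)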
+ Write-PipelineTelemetryError -Force -Category $category -Message $message + ExitWithExitCode $LASTEXITCODE + } +} + function Stop-Processes() { - Write-Host "Killing running build processes..." + Write-Host 'Killing running build processes...' foreach ($processName in $processesToStopOnExit) { Get-Process -Name $processName -ErrorAction SilentlyContinue | Stop-Process } @@ -540,16 +756,40 @@ function MSBuild() { if ($pipelinesLog) { $buildTool = InitializeBuildTool - # Work around issues with Azure Artifacts credential provider - # https://github.com/dotnet/arcade/issues/3932 - if ($ci -and $buildTool.Tool -eq "dotnet") { - dotnet nuget locals http-cache -c + if ($ci -and $buildTool.Tool -eq 'dotnet') { + $env:NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS = 20 + $env:NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS = 20 + Write-PipelineSetVariable -Name 'NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS' -Value '20' + Write-PipelineSetVariable -Name 'NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS' -Value '20' } + Enable-Nuget-EnhancedRetry + $toolsetBuildProject = InitializeToolset - $path = Split-Path -parent $toolsetBuildProject - $path = Join-Path $path (Join-Path $buildTool.Framework "Microsoft.DotNet.Arcade.Sdk.dll") - $args += "/logger:$path" + $basePath = Split-Path -parent $toolsetBuildProject + $possiblePaths = @( + # new scripts need to work with old packages, so we need to look for the old names/versions + (Join-Path $basePath (Join-Path $buildTool.Framework 'Microsoft.DotNet.ArcadeLogging.dll')), + (Join-Path $basePath (Join-Path $buildTool.Framework 'Microsoft.DotNet.Arcade.Sdk.dll')), + (Join-Path $basePath (Join-Path netcoreapp2.1 'Microsoft.DotNet.ArcadeLogging.dll')), + (Join-Path $basePath (Join-Path netcoreapp2.1 'Microsoft.DotNet.Arcade.Sdk.dll')) + (Join-Path $basePath (Join-Path netcoreapp3.1 'Microsoft.DotNet.ArcadeLogging.dll')), + (Join-Path $basePath (Join-Path netcoreapp3.1 'Microsoft.DotNet.Arcade.Sdk.dll')) + (Join-Path $basePath (Join-Path net7.0 'Microsoft.DotNet.ArcadeLogging.dll')), + (Join-Path $basePath (Join-Path net7.0 'Microsoft.DotNet.Arcade.Sdk.dll')) + ) + $selectedPath = $null + foreach ($path in $possiblePaths) { + if (Test-Path $path -PathType Leaf) { + $selectedPath = $path + break + } + } + if (-not $selectedPath) { + Write-PipelineTelemetryError -Category 'Build' -Message 'Unable to find arcade sdk logger assembly.' + ExitWithExitCode 1 + } + $args += "/logger:$selectedPath" } MSBuild-Core @args @@ -562,45 +802,62 @@ function MSBuild() { # function MSBuild-Core() { if ($ci) { - if (!$binaryLog) { - Write-PipelineTaskError -Message "Binary log must be enabled in CI build." + if (!$binaryLog -and !$excludeCIBinarylog) { + Write-PipelineTelemetryError -Category 'Build' -Message 'Binary log must be enabled in CI build, or explicitly opted-out from with the -excludeCIBinarylog switch.' ExitWithExitCode 1 } if ($nodeReuse) { - Write-PipelineTaskError -Message "Node reuse must be disabled in CI build." + Write-PipelineTelemetryError -Category 'Build' -Message 'Node reuse must be disabled in CI build.' 
      ExitWithExitCode 1
     }
   }

+  Enable-Nuget-EnhancedRetry
+
   $buildTool = InitializeBuildTool

   $cmdArgs = "$($buildTool.Command) /m /nologo /clp:Summary /v:$verbosity /nr:$nodeReuse /p:ContinuousIntegrationBuild=$ci"

   if ($warnAsError) {
-    $cmdArgs += " /warnaserror /p:TreatWarningsAsErrors=true"
+    $cmdArgs += ' /warnaserror /p:TreatWarningsAsErrors=true'
   }
   else {
-    $cmdArgs += " /p:TreatWarningsAsErrors=false"
+    $cmdArgs += ' /p:TreatWarningsAsErrors=false'
   }

   foreach ($arg in $args) {
-    if ($arg -ne $null -and $arg.Trim() -ne "") {
+    if ($null -ne $arg -and $arg.Trim() -ne "") {
+      if ($arg.EndsWith('\')) {
+        $arg = $arg + "\"
+      }
       $cmdArgs += " `"$arg`""
     }
   }

+  $env:ARCADE_BUILD_TOOL_COMMAND = "$($buildTool.Path) $cmdArgs"
+
   $exitCode = Exec-Process $buildTool.Path $cmdArgs

   if ($exitCode -ne 0) {
-    Write-PipelineTaskError -Message "Build failed."
+    # We should not Write-PipelineTaskError here, because that message would show up in the build summary.
+    # The build already logged an error; that's the reason it failed. Producing an error here only adds noise.
+    Write-Host "Build failed with exit code $exitCode. Check errors above." -ForegroundColor Red

     $buildLog = GetMSBuildBinaryLogCommandLineArgument $args
-    if ($buildLog -ne $null) {
+    if ($null -ne $buildLog) {
       Write-Host "See log: $buildLog" -ForegroundColor DarkGray
     }

-    ExitWithExitCode $exitCode
+    # When running on Azure Pipelines, override the returned exit code to avoid double logging.
+    if ($ci -and $env:SYSTEM_TEAMPROJECT -ne $null) {
+      Write-PipelineSetResult -Result "Failed" -Message "msbuild execution failed."
+      # Exiting with a nonzero exit code would cause the Azure Pipelines task to log yet another "noise" error.
+      # The Write-PipelineSetResult above marks the task as failed without adding yet another error.
+      ExitWithExitCode 0
+    } else {
+      ExitWithExitCode $exitCode
+    }
   }
 }

@@ -608,12 +865,12 @@ function GetMSBuildBinaryLogCommandLineArgument($arguments) {
   foreach ($argument in $arguments) {
     if ($argument -ne $null) {
       $arg = $argument.Trim()
-      if ($arg.StartsWith("/bl:", "OrdinalIgnoreCase")) {
-        return $arg.Substring("/bl:".Length)
+      if ($arg.StartsWith('/bl:', 'OrdinalIgnoreCase')) {
+        return $arg.Substring('/bl:'.Length)
       }

-      if ($arg.StartsWith("/binaryLogger:", "OrdinalIgnoreCase")) {
-        return $arg.Substring("/binaryLogger:".Length)
+      if ($arg.StartsWith('/binaryLogger:', 'OrdinalIgnoreCase')) {
+        return $arg.Substring('/binaryLogger:'.Length)
       }
     }
   }
@@ -634,16 +891,26 @@ function IsWindowsPlatform() {
   return [environment]::OSVersion.Platform -eq [PlatformID]::Win32NT
 }

+function Get-Darc($version) {
+  $darcPath = "$TempDir\darc\$([guid]::NewGuid())"
+  if ($version -ne $null) {
+    & $PSScriptRoot\darc-init.ps1 -toolpath $darcPath -darcVersion $version | Out-Host
+  } else {
+    & $PSScriptRoot\darc-init.ps1 -toolpath $darcPath | Out-Host
+  }
+  return "$darcPath\darc.exe"
+}
+
. 
$PSScriptRoot\pipeline-logging-functions.ps1

-$RepoRoot = Resolve-Path (Join-Path $PSScriptRoot "..\..")
-$EngRoot = Resolve-Path (Join-Path $PSScriptRoot "..")
-$ArtifactsDir = Join-Path $RepoRoot "artifacts"
-$ToolsetDir = Join-Path $ArtifactsDir "toolset"
-$ToolsDir = Join-Path $RepoRoot ".tools"
-$LogDir = Join-Path (Join-Path $ArtifactsDir "log") $configuration
-$TempDir = Join-Path (Join-Path $ArtifactsDir "tmp") $configuration
-$GlobalJson = Get-Content -Raw -Path (Join-Path $RepoRoot "global.json") | ConvertFrom-Json
+$RepoRoot = Resolve-Path (Join-Path $PSScriptRoot '..\..\')
+$EngRoot = Resolve-Path (Join-Path $PSScriptRoot '..')
+$ArtifactsDir = Join-Path $RepoRoot 'artifacts'
+$ToolsetDir = Join-Path $ArtifactsDir 'toolset'
+$ToolsDir = Join-Path $RepoRoot '.tools'
+$LogDir = Join-Path (Join-Path $ArtifactsDir 'log') $configuration
+$TempDir = Join-Path (Join-Path $ArtifactsDir 'tmp') $configuration
+$GlobalJson = Get-Content -Raw -Path (Join-Path $RepoRoot 'global.json') | ConvertFrom-Json
 # true if global.json contains a "runtimes" section
 $globalJsonHasRuntimes = if ($GlobalJson.tools.PSObject.Properties.Name -Match 'runtimes') { $true } else { $false }

@@ -656,3 +923,35 @@ Write-PipelineSetVariable -Name 'Artifacts.Toolset' -Value $ToolsetDir
 Write-PipelineSetVariable -Name 'Artifacts.Log' -Value $LogDir
 Write-PipelineSetVariable -Name 'TEMP' -Value $TempDir
 Write-PipelineSetVariable -Name 'TMP' -Value $TempDir
+
+# Import custom tools configuration, if present in the repo.
+# Note: Import in global scope so that the script can set top-level variables without qualification.
+if (!$disableConfigureToolsetImport) {
+  $configureToolsetScript = Join-Path $EngRoot 'configure-toolset.ps1'
+  if (Test-Path $configureToolsetScript) {
+    . $configureToolsetScript
+    if ((Test-Path variable:failOnConfigureToolsetError) -And $failOnConfigureToolsetError) {
+      if ((Test-Path variable:LastExitCode) -And ($LastExitCode -ne 0)) {
+        Write-PipelineTelemetryError -Category 'Build' -Message 'configure-toolset.ps1 returned a non-zero exit code'
+        ExitWithExitCode $LastExitCode
+      }
+    }
+  }
+}
+
+#
+# If the $ci flag is set, turn on (and log that we did) special environment variables for improved NuGet client retry logic.
+#
+function Enable-Nuget-EnhancedRetry() {
+  if ($ci) {
+    Write-Host "Setting NUGET enhanced retry environment variables"
+    $env:NUGET_ENABLE_ENHANCED_HTTP_RETRY = 'true'
+    $env:NUGET_ENHANCED_MAX_NETWORK_TRY_COUNT = 6
+    $env:NUGET_ENHANCED_NETWORK_RETRY_DELAY_MILLISECONDS = 1000
+    $env:NUGET_RETRY_HTTP_429 = 'true'
+    Write-PipelineSetVariable -Name 'NUGET_ENABLE_ENHANCED_HTTP_RETRY' -Value 'true'
+    Write-PipelineSetVariable -Name 'NUGET_ENHANCED_MAX_NETWORK_TRY_COUNT' -Value '6'
+    Write-PipelineSetVariable -Name 'NUGET_ENHANCED_NETWORK_RETRY_DELAY_MILLISECONDS' -Value '1000'
+    Write-PipelineSetVariable -Name 'NUGET_RETRY_HTTP_429' -Value 'true'
+  }
+}
diff --git a/eng/common/tools.sh b/eng/common/tools.sh
index acbb0c5b3..b9b329ce3 100755
--- a/eng/common/tools.sh
+++ b/eng/common/tools.sh
@@ -18,9 +18,17 @@ fi
 # Build configuration. Common values include 'Debug' and 'Release', but the repository may use other names.
 configuration=${configuration:-'Debug'}

+# Set to true to opt out of outputting binary log while running in CI
+exclude_ci_binary_log=${exclude_ci_binary_log:-false}
+
+if [[ "$ci" == true && "$exclude_ci_binary_log" == false ]]; then
+  binary_log_default=true
+else
+  binary_log_default=false
+fi
+
 # Set to true to output binary log from msbuild. 
Note that emitting binary log slows down the build. -# Binary log must be enabled on CI. -binary_log=${binary_log:-$ci} +binary_log=${binary_log:-$binary_log_default} # Turns on machine preparation/clean up code that changes the machine state (e.g. kills build processes). prepare_machine=${prepare_machine:-false} @@ -41,12 +49,12 @@ fi # Configures warning treatment in msbuild. warn_as_error=${warn_as_error:-true} -# True to attempt using .NET Core already that meets requirements specified in global.json +# True to attempt using .NET Core already that meets requirements specified in global.json # installed on the machine instead of downloading one. use_installed_dotnet_cli=${use_installed_dotnet_cli:-true} # Enable repos to use a particular version of the on-line dotnet-install scripts. -# default URL: https://dot.net/v1/dotnet-install.sh +# default URL: https://dotnet.microsoft.com/download/dotnet/scripts/v1/dotnet-install.sh dotnetInstallScriptVersion=${dotnetInstallScriptVersion:-'v1'} # True to use global NuGet cache instead of restoring packages to repository-local directory. @@ -56,6 +64,10 @@ else use_global_nuget_cache=${use_global_nuget_cache:-true} fi +# Used when restoring .NET SDK from alternative feeds +runtime_source_feed=${runtime_source_feed:-''} +runtime_source_feed_key=${runtime_source_feed_key:-''} + # Resolve any symlinks in the given path. function ResolvePath { local path=$1 @@ -77,16 +89,16 @@ function ResolvePath { function ReadGlobalVersion { local key=$1 - local line=`grep -m 1 "$key" "$global_json_file"` - local pattern="\"$key\" *: *\"(.*)\"" + if command -v jq &> /dev/null; then + _ReadGlobalVersion="$(jq -r ".[] | select(has(\"$key\")) | .\"$key\"" "$global_json_file")" + elif [[ "$(cat "$global_json_file")" =~ \"$key\"[[:space:]\:]*\"([^\"]+) ]]; then + _ReadGlobalVersion=${BASH_REMATCH[1]} + fi - if [[ ! $line =~ $pattern ]]; then - Write-PipelineTelemetryError -category 'InitializeToolset' "Error: Cannot find \"$key\" in $global_json_file" + if [[ -z "$_ReadGlobalVersion" ]]; then + Write-PipelineTelemetryError -category 'Build' "Error: Cannot find \"$key\" in $global_json_file" ExitWithExitCode 1 fi - - # return value - _ReadGlobalVersion=${BASH_REMATCH[1]} } function InitializeDotNetCli { @@ -152,15 +164,6 @@ function InitializeDotNetCli { # build steps from using anything other than what we've downloaded. 
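# On the ReadGlobalVersion rewrite above (a sketch, assuming a typical
# global.json; the jq filter is the one the function actually runs):
#
#   $ cat global.json
#   { "tools": { "dotnet": "8.0.404" } }
#   $ jq -r '.[] | select(has("dotnet")) | ."dotnet"' global.json
#   8.0.404
#
# When jq is unavailable, the regex fallback scans the raw file for the
# first occurrence of the key, so duplicate key names in unrelated sections
# could shadow each other; jq is the more precise path when present.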
Write-PipelinePrependPath -path "$dotnet_root" - # Work around issues with Azure Artifacts credential provider - # https://github.com/dotnet/arcade/issues/3932 - if [[ "$ci" == true ]]; then - export NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS=20 - export NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS=20 - Write-PipelineSetVariable -name "NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS" -value "20" - Write-PipelineSetVariable -name "NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS" -value "20" - fi - Write-PipelineSetVariable -name "DOTNET_MULTILEVEL_LOOKUP" -value "0" Write-PipelineSetVariable -name "DOTNET_SKIP_FIRST_TIME_EXPERIENCE" -value "1" @@ -171,73 +174,130 @@ function InitializeDotNetCli { function InstallDotNetSdk { local root=$1 local version=$2 - local architecture="" - if [[ $# == 3 ]]; then + local architecture="unset" + if [[ $# -ge 3 ]]; then architecture=$3 fi - InstallDotNet "$root" "$version" $architecture + InstallDotNet "$root" "$version" $architecture 'sdk' 'true' $runtime_source_feed $runtime_source_feed_key } function InstallDotNet { local root=$1 local version=$2 - + local runtime=$4 + + local dotnetVersionLabel="'$runtime v$version'" + if [[ -n "${4:-}" ]] && [ "$4" != 'sdk' ]; then + runtimePath="$root" + runtimePath="$runtimePath/shared" + case "$runtime" in + dotnet) + runtimePath="$runtimePath/Microsoft.NETCore.App" + ;; + aspnetcore) + runtimePath="$runtimePath/Microsoft.AspNetCore.App" + ;; + windowsdesktop) + runtimePath="$runtimePath/Microsoft.WindowsDesktop.App" + ;; + *) + ;; + esac + runtimePath="$runtimePath/$version" + + dotnetVersionLabel="runtime toolset '$runtime/$architecture v$version'" + + if [ -d "$runtimePath" ]; then + echo " Runtime toolset '$runtime/$architecture v$version' already installed." + local installSuccess=1 + return + fi + fi + GetDotNetInstallScript "$root" local install_script=$_GetDotNetInstallScript - local archArg='' - if [[ -n "${3:-}" ]]; then - archArg="--architecture $3" + local installParameters=(--version $version --install-dir "$root") + + if [[ -n "${3:-}" ]] && [ "$3" != 'unset' ]; then + installParameters+=(--architecture $3) fi - local runtimeArg='' - if [[ -n "${4:-}" ]]; then - runtimeArg="--runtime $4" + if [[ -n "${4:-}" ]] && [ "$4" != 'sdk' ]; then + installParameters+=(--runtime $4) fi - - local skipNonVersionedFilesArg="" - if [[ "$#" -ge "5" ]]; then - skipNonVersionedFilesArg="--skip-non-versioned-files" + if [[ "$#" -ge "5" ]] && [[ "$5" != 'false' ]]; then + installParameters+=(--skip-non-versioned-files) fi - bash "$install_script" --version $version --install-dir "$root" $archArg $runtimeArg $skipNonVersionedFilesArg || { - local exit_code=$? - Write-PipelineTelemetryError -category 'InitializeToolset' "Failed to install dotnet SDK from public location (exit code '$exit_code')." - - if [[ -n "$runtimeArg" ]]; then - local runtimeSourceFeed='' - if [[ -n "${6:-}" ]]; then - runtimeSourceFeed="--azure-feed $6" - fi - local runtimeSourceFeedKey='' - if [[ -n "${7:-}" ]]; then - # The 'base64' binary on alpine uses '-d' and doesn't support '--decode' - # '-d'. To work around this, do a simple detection and switch the parameter - # accordingly. 
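# For reference, the pre-install check above resolves runtime names to the
# standard dotnet install layout (illustration; the version is an example):
#
#   $root/shared/Microsoft.NETCore.App/8.0.0         runtime 'dotnet'
#   $root/shared/Microsoft.AspNetCore.App/8.0.0      runtime 'aspnetcore'
#   $root/shared/Microsoft.WindowsDesktop.App/8.0.0  runtime 'windowsdesktop'
#
# If the target directory already exists, the function returns early and
# skips the download entirely.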
- decodeArg="--decode" - if base64 --help 2>&1 | grep -q "BusyBox"; then - decodeArg="-d" - fi - decodedFeedKey=`echo $7 | base64 $decodeArg` - runtimeSourceFeedKey="--feed-credential $decodedFeedKey" - fi + local variations=() # list of variable names with parameter arrays in them - if [[ -n "$runtimeSourceFeed" || -n "$runtimeSourceFeedKey" ]]; then - bash "$install_script" --version $version --install-dir "$root" $archArg $runtimeArg $skipNonVersionedFilesArg $runtimeSourceFeed $runtimeSourceFeedKey || { - local exit_code=$? - Write-PipelineTelemetryError -category 'InitializeToolset' "Failed to install dotnet SDK from custom location '$runtimeSourceFeed' (exit code '$exit_code')." - ExitWithExitCode $exit_code - } - else - ExitWithExitCode $exit_code + local public_location=("${installParameters[@]}") + variations+=(public_location) + + local dotnetbuilds=("${installParameters[@]}" --azure-feed "https://ci.dot.net/public") + variations+=(dotnetbuilds) + + if [[ -n "${6:-}" ]]; then + variations+=(private_feed) + local private_feed=("${installParameters[@]}" --azure-feed $6) + if [[ -n "${7:-}" ]]; then + # The 'base64' binary on alpine uses '-d' and doesn't support '--decode' + # '-d'. To work around this, do a simple detection and switch the parameter + # accordingly. + decodeArg="--decode" + if base64 --help 2>&1 | grep -q "BusyBox"; then + decodeArg="-d" fi + decodedFeedKey=`echo $7 | base64 $decodeArg` + private_feed+=(--feed-credential $decodedFeedKey) fi - } + fi + + local installSuccess=0 + for variationName in "${variations[@]}"; do + local name="$variationName[@]" + local variation=("${!name}") + echo " Attempting to install $dotnetVersionLabel from $variationName." + bash "$install_script" "${variation[@]}" && installSuccess=1 + if [[ "$installSuccess" -eq 1 ]]; then + break + fi + + echo " Failed to install $dotnetVersionLabel from $variationName." + done + + if [[ "$installSuccess" -eq 0 ]]; then + Write-PipelineTelemetryError -category 'InitializeToolset' "Failed to install $dotnetVersionLabel from any of the specified locations." + ExitWithExitCode 1 + fi +} + +function with_retries { + local maxRetries=5 + local retries=1 + echo "Trying to run '$@' for maximum of $maxRetries attempts." + while [[ $((retries++)) -le $maxRetries ]]; do + "$@" + + if [[ $? == 0 ]]; then + echo "Ran '$@' successfully." + return 0 + fi + + timeout=$((3**$retries-1)) + echo "Failed to execute '$@'. Waiting $timeout seconds before next attempt ($retries out of $maxRetries)." 1>&2 + sleep $timeout + done + + echo "Failed to execute '$@' for $maxRetries times." 1>&2 + + return 1 } function GetDotNetInstallScript { local root=$1 local install_script="$root/dotnet-install.sh" - local install_script_url="https://dot.net/$dotnetInstallScriptVersion/dotnet-install.sh" + local install_script_url="https://dotnet.microsoft.com/download/dotnet/scripts/$dotnetInstallScriptVersion/dotnet-install.sh" if [[ ! -a "$install_script" ]]; then mkdir -p "$root" @@ -246,13 +306,21 @@ function GetDotNetInstallScript { # Use curl if available, otherwise use wget if command -v curl > /dev/null; then + # first, try directly, if this fails we will retry with verbose logging curl "$install_script_url" -sSL --retry 10 --create-dirs -o "$install_script" || { - local exit_code=$? - Write-PipelineTelemetryError -category 'InitializeToolset' "Failed to acquire dotnet install script (exit code '$exit_code')." 
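# On with_retries above (sketch, with the function's fixed 5 attempts): the
# delay grows as 3**retries - 1 seconds, so failed attempts are spaced
# roughly 8, 26, 80, and 242 seconds apart. Example invocation, mirroring
# the install-script download it backs below:
#
#   with_retries wget -v -O "$install_script" "$install_script_url"
#
# The function returns 0 on the first success and 1 after exhausting all
# five attempts.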
- ExitWithExitCode $exit_code + if command -v openssl &> /dev/null; then + echo "Curl failed; dumping some information about dotnet.microsoft.com for later investigation" + echo | openssl s_client -showcerts -servername dotnet.microsoft.com -connect dotnet.microsoft.com:443 + fi + echo "Will now retry the same URL with verbose logging." + with_retries curl "$install_script_url" -sSL --verbose --retry 10 --create-dirs -o "$install_script" || { + local exit_code=$? + Write-PipelineTelemetryError -category 'InitializeToolset' "Failed to acquire dotnet install script (exit code '$exit_code')." + ExitWithExitCode $exit_code + } } - else - wget -q -O "$install_script" "$install_script_url" || { + else + with_retries wget -v -O "$install_script" "$install_script_url" || { local exit_code=$? Write-PipelineTelemetryError -category 'InitializeToolset' "Failed to acquire dotnet install script (exit code '$exit_code')." ExitWithExitCode $exit_code @@ -267,21 +335,28 @@ function InitializeBuildTool { if [[ -n "${_InitializeBuildTool:-}" ]]; then return fi - + InitializeDotNetCli $restore # return values - _InitializeBuildTool="$_InitializeDotNetCli/dotnet" + _InitializeBuildTool="$_InitializeDotNetCli/dotnet" _InitializeBuildToolCommand="msbuild" - _InitializeBuildToolFramework="netcoreapp2.1" + # use override if it exists - commonly set by source-build + if [[ "${_OverrideArcadeInitializeBuildToolFramework:-x}" == "x" ]]; then + _InitializeBuildToolFramework="net8.0" + else + _InitializeBuildToolFramework="${_OverrideArcadeInitializeBuildToolFramework}" + fi } +# Set RestoreNoCache as a workaround for https://github.com/NuGet/Home/issues/3116 function GetNuGetPackageCachePath { if [[ -z ${NUGET_PACKAGES:-} ]]; then if [[ "$use_global_nuget_cache" == true ]]; then export NUGET_PACKAGES="$HOME/.nuget/packages" else export NUGET_PACKAGES="$repo_root/.packages" + export RESTORENOCACHE=true fi fi @@ -290,6 +365,9 @@ function GetNuGetPackageCachePath { } function InitializeNativeTools() { + if [[ -n "${DisableNativeToolsetInstalls:-}" ]]; then + return + fi if grep -Fq "native-tools" $global_json_file then local nativeArgs="" @@ -332,14 +410,14 @@ function InitializeToolset { if [[ "$binary_log" == true ]]; then bl="/bl:$log_dir/ToolsetRestore.binlog" fi - + echo '' > "$proj" MSBuild-Core "$proj" $bl /t:__WriteToolsetLocation /clp:ErrorsOnly\;NoSummary /p:__ToolsetLocationOutputFile="$toolset_location_file" local toolset_build_proj=`cat "$toolset_location_file"` if [[ ! 
-a "$toolset_build_proj" ]]; then - Write-PipelineTelemetryError -category 'InitializeToolset' "Invalid toolset path: $toolset_build_proj" + Write-PipelineTelemetryError -category 'Build' "Invalid toolset path: $toolset_build_proj" ExitWithExitCode 3 fi @@ -367,15 +445,36 @@ function MSBuild { InitializeBuildTool InitializeToolset - # Work around issues with Azure Artifacts credential provider - # https://github.com/dotnet/arcade/issues/3932 if [[ "$ci" == true ]]; then - dotnet nuget locals http-cache -c + export NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS=20 + export NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS=20 + Write-PipelineSetVariable -name "NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS" -value "20" + Write-PipelineSetVariable -name "NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS" -value "20" fi local toolset_dir="${_InitializeToolset%/*}" - local logger_path="$toolset_dir/$_InitializeBuildToolFramework/Microsoft.DotNet.Arcade.Sdk.dll" - args=( "${args[@]}" "-logger:$logger_path" ) + # new scripts need to work with old packages, so we need to look for the old names/versions + local selectedPath= + local possiblePaths=() + possiblePaths+=( "$toolset_dir/$_InitializeBuildToolFramework/Microsoft.DotNet.ArcadeLogging.dll" ) + possiblePaths+=( "$toolset_dir/$_InitializeBuildToolFramework/Microsoft.DotNet.Arcade.Sdk.dll" ) + possiblePaths+=( "$toolset_dir/netcoreapp2.1/Microsoft.DotNet.ArcadeLogging.dll" ) + possiblePaths+=( "$toolset_dir/netcoreapp2.1/Microsoft.DotNet.Arcade.Sdk.dll" ) + possiblePaths+=( "$toolset_dir/netcoreapp3.1/Microsoft.DotNet.ArcadeLogging.dll" ) + possiblePaths+=( "$toolset_dir/netcoreapp3.1/Microsoft.DotNet.Arcade.Sdk.dll" ) + possiblePaths+=( "$toolset_dir/net7.0/Microsoft.DotNet.ArcadeLogging.dll" ) + possiblePaths+=( "$toolset_dir/net7.0/Microsoft.DotNet.Arcade.Sdk.dll" ) + for path in "${possiblePaths[@]}"; do + if [[ -f $path ]]; then + selectedPath=$path + break + fi + done + if [[ -z "$selectedPath" ]]; then + Write-PipelineTelemetryError -category 'Build' "Unable to find arcade sdk logger assembly." + ExitWithExitCode 1 + fi + args+=( "-logger:$selectedPath" ) fi MSBuild-Core ${args[@]} @@ -383,13 +482,13 @@ function MSBuild { function MSBuild-Core { if [[ "$ci" == true ]]; then - if [[ "$binary_log" != true ]]; then - Write-PipelineTaskError "Binary log must be enabled in CI build." + if [[ "$binary_log" != true && "$exclude_ci_binary_log" != true ]]; then + Write-PipelineTelemetryError -category 'Build' "Binary log must be enabled in CI build, or explicitly opted-out from with the -noBinaryLog switch." ExitWithExitCode 1 fi if [[ "$node_reuse" == true ]]; then - Write-PipelineTaskError "Node reuse must be disabled in CI build." + Write-PipelineTelemetryError -category 'Build' "Node reuse must be disabled in CI build." ExitWithExitCode 1 fi fi @@ -401,11 +500,39 @@ function MSBuild-Core { warnaserror_switch="/warnaserror" fi - "$_InitializeBuildTool" "$_InitializeBuildToolCommand" /m /nologo /clp:Summary /v:$verbosity /nr:$node_reuse $warnaserror_switch /p:TreatWarningsAsErrors=$warn_as_error /p:ContinuousIntegrationBuild=$ci "$@" || { - local exit_code=$? - Write-PipelineTaskError "Build failed (exit code '$exit_code')." - ExitWithExitCode $exit_code + function RunBuildTool { + export ARCADE_BUILD_TOOL_COMMAND="$_InitializeBuildTool $@" + + "$_InitializeBuildTool" "$@" || { + local exit_code=$? + # We should not Write-PipelineTaskError here because that message shows up in the build summary + # The build already logged an error, that's the reason it failed. 
Producing an error here only adds noise. + echo "Build failed with exit code $exit_code. Check errors above." + + # When running on Azure Pipelines, override the returned exit code to avoid double logging. + if [[ "$ci" == "true" && -n ${SYSTEM_TEAMPROJECT:-} ]]; then + Write-PipelineSetResult -result "Failed" -message "msbuild execution failed." + # Exiting with an exit code causes the azure pipelines task to log yet another "noise" error + # The above Write-PipelineSetResult will cause the task to be marked as failure without adding yet another error + ExitWithExitCode 0 + else + ExitWithExitCode $exit_code + fi + } } + + RunBuildTool "$_InitializeBuildToolCommand" /m /nologo /clp:Summary /v:$verbosity /nr:$node_reuse $warnaserror_switch /p:TreatWarningsAsErrors=$warn_as_error /p:ContinuousIntegrationBuild=$ci "$@" +} + +function GetDarc { + darc_path="$temp_dir/darc" + version="$1" + + if [[ -n "$version" ]]; then + version="--darcversion $version" + fi + + "$eng_root/common/darc-init.sh" --toolpath "$darc_path" $version } ResolvePath "${BASH_SOURCE[0]}" @@ -415,23 +542,27 @@ _script_dir=`dirname "$_ResolvePath"` eng_root=`cd -P "$_script_dir/.." && pwd` repo_root=`cd -P "$_script_dir/../.." && pwd` -artifacts_dir="$repo_root/artifacts" +repo_root="${repo_root}/" +artifacts_dir="${repo_root}artifacts" toolset_dir="$artifacts_dir/toolset" -tools_dir="$repo_root/.tools" +tools_dir="${repo_root}.tools" log_dir="$artifacts_dir/log/$configuration" temp_dir="$artifacts_dir/tmp/$configuration" -global_json_file="$repo_root/global.json" +global_json_file="${repo_root}global.json" # determine if global.json contains a "runtimes" entry global_json_has_runtimes=false -dotnetlocal_key=`grep -m 1 "runtimes" "$global_json_file"` || true -if [[ -n "$dotnetlocal_key" ]]; then +if command -v jq &> /dev/null; then + if jq -e '.tools | has("runtimes")' "$global_json_file" &> /dev/null; then + global_json_has_runtimes=true + fi +elif [[ "$(cat "$global_json_file")" =~ \"runtimes\"[[:space:]\:]*\{ ]]; then global_json_has_runtimes=true fi # HOME may not be defined in some scenarios, but it is required by NuGet if [[ -z $HOME ]]; then - export HOME="$repo_root/artifacts/.home/" + export HOME="${repo_root}artifacts/.home/" mkdir -p "$HOME" fi @@ -444,3 +575,18 @@ Write-PipelineSetVariable -name "Artifacts.Toolset" -value "$toolset_dir" Write-PipelineSetVariable -name "Artifacts.Log" -value "$log_dir" Write-PipelineSetVariable -name "Temp" -value "$temp_dir" Write-PipelineSetVariable -name "TMP" -value "$temp_dir" + +# Import custom tools configuration, if present in the repo. +if [ -z "${disable_configure_toolset_import:-}" ]; then + configure_toolset_script="$eng_root/configure-toolset.sh" + if [[ -a "$configure_toolset_script" ]]; then + . "$configure_toolset_script" + fi +fi + +# TODO: https://github.com/dotnet/arcade/issues/1468 +# Temporary workaround to avoid breaking change. +# Remove once repos are updated. 
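# A usage sketch for the compatibility shim just below (hypothetical
# consumer, real variable names): a repo still exporting the old camelCase
# name keeps its behavior because the value is copied into the snake_case
# variable before it is consulted.
#
#   useInstalledDotNetCli=false . eng/common/tools.sh
#   # use_installed_dotnet_cli is now "false", forcing a fresh SDK download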
+if [[ -n "${useInstalledDotNetCli:-}" ]]; then + use_installed_dotnet_cli="$useInstalledDotNetCli" +fi From 9b1a0b82eb77ecdbb4fb03641f30352b3c12f7f4 Mon Sep 17 00:00:00 2001 From: "Ihor.Zhuravlov" Date: Fri, 3 Jan 2025 17:46:31 +0100 Subject: [PATCH 4/5] Update of Arcade: Modified versions, and merged default configuration of Arcade with Dotnet.Spark --- LICENSE | 6 ++++-- eng/Version.Details.xml | 5 +++-- eng/Versions.props | 4 ++-- global.json | 10 ++++++++-- src/csharp/Directory.Build.props | 19 ++++++++++++------- src/csharp/Directory.Build.targets | 7 +++---- 6 files changed, 32 insertions(+), 19 deletions(-) diff --git a/LICENSE b/LICENSE index 02a0812ba..984713a49 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,8 @@ -MIT License +The MIT License (MIT) -Copyright (c) 2019 .NET Foundation +Copyright (c) .NET Foundation and Contributors + +All rights reserved. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index ddffc212e..0be3354f6 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -3,9 +3,10 @@ - + https://github.com/dotnet/arcade - 5fd50687c9a9f39bd2ee8221165ea9c1b3f565d9 + db87887481d4110c09a1004191002482fdd7e4f2 + diff --git a/eng/Versions.props b/eng/Versions.props index e3a8cf7f9..1fe11ba76 100644 --- a/eng/Versions.props +++ b/eng/Versions.props @@ -1,6 +1,6 @@ - - + + 2.1.1 prerelease diff --git a/global.json b/global.json index 6a00584c6..6097f2d80 100644 --- a/global.json +++ b/global.json @@ -1,8 +1,14 @@ { + "sdk": { + "version": "8.0.404", + "rollForward": "latestFeature" + }, "tools": { "dotnet": "8.0.404" }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "1.0.0-beta.20230.5" + "Microsoft.DotNet.Arcade.Sdk": "8.0.0-beta.24359.3", + "Microsoft.DotNet.Helix.Sdk": "8.0.0-beta.24359.3", + "Microsoft.Build.NoTargets": "3.7.0" } -} +} \ No newline at end of file diff --git a/src/csharp/Directory.Build.props b/src/csharp/Directory.Build.props index ec0affcac..67b64a34a 100644 --- a/src/csharp/Directory.Build.props +++ b/src/csharp/Directory.Build.props @@ -1,9 +1,8 @@ - - + true - + @@ -16,14 +15,20 @@ true 4 .NET for Apache Spark - + $(CopyrightNetFoundation) LICENSE true https://github.com/dotnet/spark https://github.com/dotnet/spark - true - snupkg + embedded + true + Latest + + + false @@ -36,4 +41,4 @@ PackagePath="\" Visible="false" /> - + \ No newline at end of file diff --git a/src/csharp/Directory.Build.targets b/src/csharp/Directory.Build.targets index c00601eaa..fd8f58948 100644 --- a/src/csharp/Directory.Build.targets +++ b/src/csharp/Directory.Build.targets @@ -1,12 +1,11 @@ - - + all runtime; build; native; contentfiles; analyzers - - + + \ No newline at end of file From 7003f9cd3b847d53a32b59c9acff0ebd5563b5b2 Mon Sep 17 00:00:00 2001 From: "Ihor.Zhuravlov" Date: Tue, 7 Jan 2025 09:16:31 +0100 Subject: [PATCH 5/5] Removed unneeded vs code configuration file, and added it to gitignore --- .gitignore | 2 ++ .vscode/settings.json | 5 ----- 2 files changed, 2 insertions(+), 5 deletions(-) delete mode 100644 .vscode/settings.json diff --git a/.gitignore b/.gitignore index 7b325efc4..4d1bda938 100644 --- a/.gitignore +++ b/.gitignore @@ -27,6 +27,8 @@ bld/ # Visual Studio 2015/2017 cache/options directory .vs/ +# Visual Studio Code configuration directory +.vscode/ # Uncomment if you have tasks that create the project's static files in wwwroot #wwwroot/ diff --git 
a/.vscode/settings.json b/.vscode/settings.json deleted file mode 100644 index 32cfc61d2..000000000 --- a/.vscode/settings.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "files.watcherExclude": { - "**/target": true - } -} \ No newline at end of file
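# After PATCH 5/5 the settings file is ignored rather than versioned; a
# quick verification (sketch; the reported .gitignore line number will vary
# by checkout):
#
#   $ git check-ignore -v .vscode/settings.json
#   .gitignore:<line>:.vscode/    .vscode/settings.json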