diff --git a/.gitattributes b/.gitattributes
new file mode 100644
index 000000000..233246089
--- /dev/null
+++ b/.gitattributes
@@ -0,0 +1,49 @@
+###############################################################################
+# Set default behavior to:
+# automatically normalize line endings on check-in, and
+# convert to Windows-style line endings on check-out
+###############################################################################
+* text=auto encoding=UTF-8
+*.sh text eol=lf
+
+###############################################################################
+# Set file behavior to:
+# treat as text, and
+# diff as C# source code
+###############################################################################
+*.cs text diff=csharp
+
+###############################################################################
+# Set file behavior to:
+# treat as text
+###############################################################################
+*.cmd text
+*.config text
+*.csproj text
+*.groovy text
+*.json text
+*.md text
+*.nuspec text
+*.pkgdef text
+*.proj text
+*.projitems text
+*.props text
+*.ps1 text
+*.resx text
+*.ruleset text
+*.shproj text
+*.sln text
+*.targets text
+*.vb text
+*.vbproj text
+*.vcxproj text
+*.vcxproj.filters text
+*.vsct text
+*.vsixmanifest text
+
+###############################################################################
+# Set file behavior to:
+# treat as binary
+###############################################################################
+*.png binary
+*.snk binary
diff --git a/.gitignore b/.gitignore
index 3e759b75b..8a642acf1 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,98 @@
+# Arcade directories
+.dotnet/
+.packages/
+.tools/
+artifacts/
+
+# MSBuild log files
+msbuild.log
+msbuild.err
+msbuild.wrn
+
+# Cross building rootfs
+cross/rootfs/
+cross/android-rootfs/
+# add x86 as it is ignored in 'Build results'
+!cross/x86
+
+# Others
+[Ss]tyle[Cc]op.*
+node_modules/
+*.metaproj
+*.metaproj.tmp
+bin.localpkg/
+
+# Windows image file caches
+Thumbs.db
+ehthumbs.db
+
+# Folder config file
+Desktop.ini
+
+# Recycle Bin used on file shares
+$RECYCLE.BIN/
+
+# Windows Installer files
+*.cab
+*.msi
+*.msm
+*.msp
+
+# Windows shortcuts
+*.lnk
+
+# KDE directory preferences
+.directory
+
+### OSX ###
+.DS_Store
+.AppleDouble
+.LSOverride
+
+# Icon must end with two \r
+Icon
+
+# Thumbnails
+._*
+
+# Files that might appear on external disk
+.Spotlight-V100
+.Trashes
+
+# Directories potentially created on remote AFP share
+.AppleDB
+.AppleDesktop
+Network Trash Folder
+Temporary Items
+.apdisk
+
+# vim temporary files
+[._]*.s[a-w][a-z]
+[._]s[a-w][a-z]
+*.un~
+Session.vim
+.netrwhist
+*~
+
+# Visual Studio Code
+.vscode/
+
+# Private test configuration and binaries.
+config.ps1
+**/IISApplications
+
+# VS generated files
+launchSettings.json
+
+# Snapcraft files
+.snapcraft
+*.snap
+parts/
+prime/
+stage/
+
+################################################################################
+
## Ignore Visual Studio temporary files, build results, and
## files generated by popular Visual Studio add-ons.
##
@@ -161,9 +256,6 @@ DocProject/Help/*.hhp
DocProject/Help/Html2
DocProject/Help/html
-# Click-Once directory
-publish/
-
# Publish Web Output
*.[Pp]ublish.xml
*.azurePubxml
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 000000000..63bed2642
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,4 @@
+Contributing
+============
+
+See [Contributing](https://github.com/dotnet/corefx/blob/master/Documentation/project-docs/contributing.md) for information about coding styles, source structure, making pull requests, and more.
diff --git a/Directory.Build.props b/Directory.Build.props
new file mode 100644
index 000000000..a67312c42
--- /dev/null
+++ b/Directory.Build.props
@@ -0,0 +1,365 @@
+
+
+
+
+
+
+
+
+
+ $(BaseIntermediateOutputPath)
+
+
+
+ $([System.IO.Path]::GetDirectoryName('$(ArcadeSdkBuildTasksAssembly)'))\..\
+ $(ArcadeSdkMSBuildProjectDir)Sign.proj
+
+
+
+
+ AnyCPU
+ true
+ true
+
+
+
+
+ $(RepoRoot)
+ $(RepoRoot)src/
+
+
+ $(ArtifactsBinDir)
+ $(ArtifactsObjDir)
+
+
+ $(NuGetPackageRoot)
+
+ $(ObjDir)local-build-tasks\
+ $(LocalBuildToolsDir)netstandard2.0\
+ $(LocalBuildToolsDir)net46\
+
+ $(LocalBuildToolsTaskDir)local.tasks.dll
+
+
+
+ $(ObjDir)HostMachineInfo.props
+
+
+
+
+ windowsdesktop
+
+
+
+
+ true
+
+
+
+ Microsoft.NETCore.App
+ .NETCoreApp
+ $(NETCoreAppFrameworkIdentifier),Version=v$(NETCoreAppFrameworkVersion)
+ .NET Core $(NETCoreAppFrameworkVersion)
+
+
+
+
+ true
+
+ true
+
+
+
+ x64
+ $(TargetArchitecture)
+
+
+
+
+
+ OSX
+ $(OS)
+
+
+
+ Debug
+ $(ConfigurationGroup)
+ $(OSGroup)_$(Configuration)
+
+
+
+
+ Debug
+ Release
+ Debug
+
+ Windows_NT
+ Unix
+ Linux
+ OSX
+ FreeBSD
+ NetBSD
+ AnyOS
+
+
+
+ Portable
+
+
+
+ false
+ false
+ false
+
+ https://github.com/dotnet/windowsdesktop
+
+
+
+
+
+
+ true
+ false
+ $(DefineConstants),DEBUG,TRACE
+
+
+
+
+ true
+ true
+ $(DefineConstants),TRACE
+
+
+
+
+ $(ConfigurationErrorMsg);Unknown ConfigurationGroup [$(ConfigurationGroup)] specified in your project.
+
+
+
+
+
+ .exe
+
+
+
+
+
+ $(HostMachineRid.Remove($(HostMachineRid.LastIndexOf('-'))))-$(TargetArchitecture)
+
+
+
+
+ win-$(TargetArchitecture)
+ osx-$(TargetArchitecture)
+ linux-$(TargetArchitecture)
+ freebsd-$(TargetArchitecture)
+
+
+
+ $(OutputRid)
+
+
+
+
+ $(DotNetOutputBlobFeedDir)assets/
+
+
+
+
+ $(OutputRid).$(ConfigurationGroup)
+
+ $(BinDir)$(OSPlatformConfig)\
+ $(BaseOutputRootPath)crossgen\
+
+ $(ObjDir)$(OSPlatformConfig)\
+
+ $(BinDir)$(OSPlatformConfig)/
+ $(PackagesBasePath)symbols/
+
+ $(ArtifactsShippingPackagesDir)
+ $(ArtifactsShippingPackagesDir)
+
+
+ $(IntermediateOutputRootPath)sharedFrameworkPublish/
+ $(IntermediateOutputRootPath)sharedFrameworkPublish.symbols\
+
+
+
+ false
+
+ true
+ $(AssetOutputPath)sharedfx_$(OutputRid)_$(ConfigurationGroup)_version_badge.svg
+
+
+
+
+ 4
+ true
+
+
+
+ false
+ false
+ false
+ false
+ false
+ false
+ false
+ false
+ false
+ false
+ false
+ false
+ false
+
+
+
+
+ true
+
+
+
+
+ true
+ true
+
+
+
+
+ true
+ true
+ true
+
+
+
+
+ true
+ true
+ true
+
+
+
+
+ true
+ true
+ true
+
+
+
+
+ true
+ true
+ true
+
+
+
+
+ true
+ true
+ true
+
+
+
+
+ true
+ true
+ true
+
+
+
+
+ true
+ true
+ true
+
+
+
+
+ true
+ true
+ true
+
+
+
+
+ true
+ true
+ true
+
+
+
+
+ true
+ true
+
+
+
+
+
+ .zip
+ .tar.gz
+ .msi
+ .pkg
+ .deb
+ .rpm
+ .exe
+ $(InstallerExtension)
+
+
+
+
+ $(OutputRid)
+ osx.10.10-x64
+ rhel.7-x64
+
+
+
+
+ true
+ true
+ false
+ false
+
+
+ <_TargetFrameworkDirectories>$(MSBuildThisFileDirectory)documentation
+ <_FullFrameworkReferenceAssemblyPaths>$(MSBuildThisFileDirectory)documentation
+
+
+
+ true
+
+
+
+
+ true
+
+
+
\ No newline at end of file
diff --git a/Directory.Build.targets b/Directory.Build.targets
new file mode 100644
index 000000000..c6dd4f565
--- /dev/null
+++ b/Directory.Build.targets
@@ -0,0 +1,97 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+ $(MajorVersion).$(MinorVersion)
+
+
+
+
+
+
+
+
+
+
+
+
+ true
+ true
+ true
+
+ true
+ true
+ true
+
+ -$(VersionSuffix)
+ $(MajorVersion).$(MinorVersion)
+ $(ProductBandVersion).$(PatchVersion)
+ $(ProductionVersion)$(ProductVersionSuffix)
+
+ $(ProductVersion)
+
+ $(SharedFrameworkNugetVersion)
+
+ WindowsDesktop/$(SharedFrameworkNugetVersion)/
+ WindowsDesktop/$(SharedFrameworkNugetVersion)/
+
+
+
+ $(SharedFrameworkNugetVersion)-$(PackageTargetRid)
+ $(HostResolverVersion)-$(PackageTargetRid)
+
+
+
+
+
+
+
+
+
+
+
+
+
+<Project xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+ <PropertyGroup>
+ <HostMachineRid>$(HostMachineRid)</HostMachineRid>
+ </PropertyGroup>
+</Project>
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ N/A
+
+
+
+
diff --git a/NuGet.config b/NuGet.config
new file mode 100644
index 000000000..1ef108f21
--- /dev/null
+++ b/NuGet.config
@@ -0,0 +1,18 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/README.md b/README.md
index d59075b6a..ab02b95cc 100644
--- a/README.md
+++ b/README.md
@@ -1 +1,43 @@
-# windowsdesktop
\ No newline at end of file
+# .NET Windows Desktop Runtime
+
+This repo contains the code to build the .NET Windows Desktop Runtime for all
+supported platforms.
+
+## Officially Released Builds
+
+They can be downloaded from [here](https://www.microsoft.com/net/download#core).
+
+## Daily Builds
+
+
+
+
+| Platform | Master |
+| --- | :---: |
+| **Windows (x64)** | [![][win-x64-badge-master]][win-x64-version-master]
[Installer][win-x64-installer-master] ([Checksum][win-x64-installer-checksum-master])
[zip][win-x64-zip-master] ([Checksum][win-x64-zip-checksum-master]) |
+| **Windows (x86)** | [![][win-x86-badge-master]][win-x86-version-master]
[Installer][win-x86-installer-master] ([Checksum][win-x86-installer-checksum-master])
[zip][win-x86-zip-master] ([Checksum][win-x86-zip-checksum-master]) |
+
+
+
+
+
+[win-x64-badge-master]: https://dotnetcli.blob.core.windows.net/dotnet/WindowsDesktop/master/sharedfx_win-x64_Release_version_badge.svg
+[win-x64-version-master]: https://dotnetcli.blob.core.windows.net/dotnet/WindowsDesktop/master/latest.version
+[win-x64-installer-master]: https://dotnetcli.blob.core.windows.net/dotnet/WindowsDesktop/master/windowsdesktop-runtime-latest-win-x64.exe
+[win-x64-installer-checksum-master]: https://dotnetclichecksums.blob.core.windows.net/dotnet/WindowsDesktop/master/windowsdesktop-runtime-latest-win-x64.exe.sha512
+[win-x64-zip-master]: https://dotnetcli.blob.core.windows.net/dotnet/WindowsDesktop/master/windowsdesktop-runtime-latest-win-x64.zip
+[win-x64-zip-checksum-master]: https://dotnetclichecksums.blob.core.windows.net/dotnet/WindowsDesktop/master/windowsdesktop-runtime-latest-win-x64.zip.sha512
+
+
+[win-x86-badge-master]: https://dotnetcli.blob.core.windows.net/dotnet/WindowsDesktop/master/sharedfx_win-x86_Release_version_badge.svg
+[win-x86-version-master]: https://dotnetcli.blob.core.windows.net/dotnet/WindowsDesktop/master/latest.version
+[win-x86-installer-master]: https://dotnetcli.blob.core.windows.net/dotnet/WindowsDesktop/master/windowsdesktop-runtime-latest-win-x86.exe
+[win-x86-installer-checksum-master]: https://dotnetclichecksums.blob.core.windows.net/dotnet/WindowsDesktop/master/windowsdesktop-runtime-latest-win-x86.exe.sha512
+[win-x86-zip-master]: https://dotnetcli.blob.core.windows.net/dotnet/WindowsDesktop/master/windowsdesktop-runtime-latest-win-x86.zip
+[win-x86-zip-checksum-master]: https://dotnetclichecksums.blob.core.windows.net/dotnet/WindowsDesktop/master/windowsdesktop-runtime-latest-win-x86.zip.sha512
+
+
diff --git a/Subsets.props b/Subsets.props
new file mode 100644
index 000000000..a97a55de0
--- /dev/null
+++ b/Subsets.props
@@ -0,0 +1,111 @@
+
+
+
+
+
+ $(Subset.ToLowerInvariant())
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
new file mode 100644
index 000000000..49517f870
--- /dev/null
+++ b/azure-pipelines.yml
@@ -0,0 +1,98 @@
+trigger:
+ batch: true
+ branches:
+ include:
+ - master
+
+pr:
+- master
+
+# Use "5" prefix to avoid colliding with Core-Setup builds that still produce WindowsDesktop.
+name: $(Date:yyyyMMdd)$(Rev:.5r)
+
+variables:
+ - name: TeamName
+ value: dotnet-core-acquisition
+ # Skip Running CI tests
+ - name: SkipTests
+ value: false
+ # Set Official Build Id
+ - name: OfficialBuildId
+ value: $(Build.BuildNumber)
+
+ # Set the target blob feed for package publish during official and validation builds.
+ - name: _DotNetArtifactsCategory
+ value: .NETCore
+ - name: _DotNetValidationArtifactsCategory
+ value: .NETCoreValidation
+
+ # Produce test-signed build for PR and Public builds
+ - ${{ if or(eq(variables['System.TeamProject'], 'public'), in(variables['Build.Reason'], 'PullRequest')) }}:
+ - name: SignType
+ value: test
+
+ # Set up non-PR build from internal project
+ - ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - name: SignType
+ value: $[ coalesce(variables.OfficialSignType, 'real') ]
+
+stages:
+- stage: Build
+ jobs:
+
+ # Windows x64
+ - template: /eng/jobs/windows-build.yml
+ parameters:
+ name: Windows_x64
+ publishRidAgnosticPackages: true
+ targetArchitecture: x64
+
+ # Windows x86
+ - template: /eng/jobs/windows-build.yml
+ parameters:
+ name: Windows_x86
+ targetArchitecture: x86
+
+- ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - stage: PrepareForPublish
+ displayName: Prepare for Publish
+ dependsOn: Build
+ jobs:
+ # Prep artifacts: sign them and upload pipeline artifacts expected by stages-based publishing.
+ - template: /eng/jobs/prepare-signed-artifacts.yml
+ parameters:
+ PublishRidAgnosticPackagesFromJobName: Windows_x64
+ # Publish to Build Asset Registry in order to generate the ReleaseConfigs artifact.
+ - template: /eng/common/templates/job/publish-build-assets.yml
+ parameters:
+ publishUsingPipelines: true
+ dependsOn: PrepareSignedArtifacts
+ pool:
+ name: NetCoreInternal-Pool
+ queue: buildpool.windows.10.amd64.vs2017
+
+ - template: /eng/stages/publish.yml
+ parameters:
+ # Publish channel configuration. The publish template wraps Arcade publishing and adds some
+ # extras that aren't implemented in Arcade yet.
+ #
+ # - dependsOn:
+ # channel:
+ # name:
+ # bar:
+ # storage:
+ dependsOnPublishStages:
+
+ - dependsOn: NetCore_Dev5_Publish
+ channel:
+ name: .NET Core 5 Dev
+ bar: NetCore_5_Dev_Channel_Id
+ storage: master
+ public: true
+
+ - dependsOn: PVR_Publish
+ channel:
+ name: .NET Tools - Validation
+ bar: PublicValidationRelease_30_Channel_Id
+ storage: dev/validation
+ public: true
diff --git a/build.cmd b/build.cmd
new file mode 100644
index 000000000..675fdf83f
--- /dev/null
+++ b/build.cmd
@@ -0,0 +1,2 @@
+@echo off
+powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0eng\common\Build.ps1""" -restore -build %*"
diff --git a/eng/Build.props b/eng/Build.props
new file mode 100644
index 000000000..306be1b57
--- /dev/null
+++ b/eng/Build.props
@@ -0,0 +1,102 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ true
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ $(RepoRoot)tools-local\tasks\
+ $(ArtifactsObjDir)local.tasks\$(Configuration)\build-semaphore.txt
+
+
+
+
+
+
+
+
diff --git a/eng/DisableSourceControlManagement.targets b/eng/DisableSourceControlManagement.targets
new file mode 100644
index 000000000..15fbe93bb
--- /dev/null
+++ b/eng/DisableSourceControlManagement.targets
@@ -0,0 +1,21 @@
+
+
+
+ true
+
+
+
+ false
+
+
+
+
+
+ unknown
+
+
+
+
diff --git a/eng/Signing.props b/eng/Signing.props
new file mode 100644
index 000000000..253f26bda
--- /dev/null
+++ b/eng/Signing.props
@@ -0,0 +1,81 @@
+
+
+
+
+ true
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/eng/Tools.props b/eng/Tools.props
new file mode 100644
index 000000000..8838df202
--- /dev/null
+++ b/eng/Tools.props
@@ -0,0 +1,19 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml
new file mode 100644
index 000000000..16d8827ad
--- /dev/null
+++ b/eng/Version.Details.xml
@@ -0,0 +1,152 @@
+
+
+
+
+ https://github.com/dotnet/corefx
+ 0286bce5fd360207ca707fe7024248be428cf0e9
+
+
+ https://github.com/dotnet/corefx
+ 0286bce5fd360207ca707fe7024248be428cf0e9
+
+
+ https://github.com/dotnet/corefx
+ 0286bce5fd360207ca707fe7024248be428cf0e9
+
+
+ https://github.com/dotnet/corefx
+ 0286bce5fd360207ca707fe7024248be428cf0e9
+
+
+ https://github.com/dotnet/corefx
+ 0286bce5fd360207ca707fe7024248be428cf0e9
+
+
+ https://github.com/dotnet/corefx
+ 0286bce5fd360207ca707fe7024248be428cf0e9
+
+
+ https://github.com/dotnet/corefx
+ 0286bce5fd360207ca707fe7024248be428cf0e9
+
+
+ https://github.com/dotnet/corefx
+ 0286bce5fd360207ca707fe7024248be428cf0e9
+
+
+ https://github.com/dotnet/corefx
+ 0286bce5fd360207ca707fe7024248be428cf0e9
+
+
+ https://github.com/dotnet/corefx
+ 0286bce5fd360207ca707fe7024248be428cf0e9
+
+
+ https://github.com/dotnet/corefx
+ 0286bce5fd360207ca707fe7024248be428cf0e9
+
+
+ https://github.com/dotnet/corefx
+ 0286bce5fd360207ca707fe7024248be428cf0e9
+
+
+ https://github.com/dotnet/corefx
+ 0286bce5fd360207ca707fe7024248be428cf0e9
+
+
+ https://github.com/dotnet/corefx
+ 0286bce5fd360207ca707fe7024248be428cf0e9
+
+
+ https://github.com/dotnet/corefx
+ 0286bce5fd360207ca707fe7024248be428cf0e9
+
+
+ https://github.com/dotnet/corefx
+ 0286bce5fd360207ca707fe7024248be428cf0e9
+
+
+ https://github.com/dotnet/corefx
+ 0286bce5fd360207ca707fe7024248be428cf0e9
+
+
+ https://github.com/dotnet/corefx
+ 0286bce5fd360207ca707fe7024248be428cf0e9
+
+
+ https://github.com/dotnet/corefx
+ 0286bce5fd360207ca707fe7024248be428cf0e9
+
+
+ https://github.com/dotnet/corefx
+ 0286bce5fd360207ca707fe7024248be428cf0e9
+
+
+ https://github.com/dotnet/corefx
+ 0286bce5fd360207ca707fe7024248be428cf0e9
+
+
+ https://github.com/dotnet/corefx
+ 0286bce5fd360207ca707fe7024248be428cf0e9
+
+
+ https://github.com/dotnet/corefx
+ 0286bce5fd360207ca707fe7024248be428cf0e9
+
+
+ https://github.com/dotnet/corefx
+ 0286bce5fd360207ca707fe7024248be428cf0e9
+
+
+ https://github.com/dotnet/corefx
+ 0286bce5fd360207ca707fe7024248be428cf0e9
+
+
+ https://github.com/dotnet/standard
+ 41409cc00210db660d38ad5098f45479e1526387
+
+
+ https://github.com/dotnet/coreclr
+ 0f049c3e15a7003efa4daa075d4e9ec11cff2e78
+
+
+ https://github.com/dotnet/winforms
+ aa251536c8ae637e95faf529480e8020d69aa361
+
+
+ https://github.com/dotnet/wpf
+ 11a8ba5060577dbddae4303e53583f8d4a82f172
+
+
+ https://dev.azure.com/dnceng/internal/_git/dotnet-wpf-int
+ 8db04f8b884947b9e5e3bc2be44bd9c2d46c2bfd
+
+
+
+
+ https://github.com/dotnet/arcade
+ 67f02d61a4bff11930c10fb03f77bb4f93a37690
+
+
+ https://github.com/dotnet/arcade
+ 67f02d61a4bff11930c10fb03f77bb4f93a37690
+
+
+ https://github.com/dotnet/arcade
+ 67f02d61a4bff11930c10fb03f77bb4f93a37690
+
+
+ https://github.com/dotnet/arcade
+ 67f02d61a4bff11930c10fb03f77bb4f93a37690
+
+
+ https://github.com/dotnet/arcade
+ 67f02d61a4bff11930c10fb03f77bb4f93a37690
+
+
+ https://github.com/dotnet/sourcelink
+
+
+
+
+
diff --git a/eng/Versions.props b/eng/Versions.props
new file mode 100644
index 000000000..f63334e0b
--- /dev/null
+++ b/eng/Versions.props
@@ -0,0 +1,92 @@
+
+
+
+ 5
+ 0
+ 0
+ alpha1
+ Alpha 1
+
+ false
+ release
+
+ true
+
+ dotnet
+ $(ContainerName)
+
+ $(MajorVersion).$(MinorVersion)
+ $(MajorVersion).$(MinorVersion)
+ netcoreapp$(NETCoreAppFrameworkVersion)
+
+
+ 1.0.0-beta.18619.4
+
+
+
+
+ 5.0.0-beta.19469.8
+ 5.0.0-beta.19469.8
+ 5.0.0-beta.19469.8
+
+ 1.0.0-beta2-19367-01
+
+ 5.0.0-alpha1.19470.3
+ 5.0.0-alpha1.19470.3
+ 5.0.0-alpha1.19470.3
+ 5.0.0-alpha1.19470.3
+ 5.0.0-alpha1.19470.3
+ 5.0.0-alpha1.19470.3
+ 5.0.0-alpha1.19470.3
+ 5.0.0-alpha1.19470.3
+ 5.0.0-alpha1.19470.3
+ 5.0.0-alpha1.19470.3
+ 5.0.0-alpha1.19470.3
+ 5.0.0-alpha1.19470.3
+ 5.0.0-alpha1.19470.3
+ 5.0.0-alpha1.19470.3
+ 5.0.0-alpha1.19470.3
+ 5.0.0-alpha1.19470.3
+ 5.0.0-alpha1.19470.3
+ 5.0.0-alpha1.19470.3
+ 5.0.0-alpha1.19470.3
+ 5.0.0-alpha1.19470.3
+ 5.0.0-alpha1.19470.3
+ 5.0.0-alpha1.19470.3
+ 5.0.0-alpha1.19470.3
+ 5.0.0-alpha1.19470.3
+ 5.0.0-alpha1.19470.3
+
+ 2.2.0-prerelease.19462.3
+
+ 5.0.0-alpha1.19462.1
+
+ 5.0.0-alpha1.19462.11
+
+ 5.0.0-alpha1.19462.16
+
+ 5.0.0-alpha1.19462.37
+
+ 15.7.179
+ $(MicrosoftBuildPackageVersion)
+ $(MicrosoftBuildPackageVersion)
+ $(MicrosoftBuildPackageVersion)
+ 1.1.1
+ 4.9.4
+ 4.9.4
+ 1.0.5
+ 1.7.0
+
+
+
+ Microsoft.DotNet.Build.Tasks.Feed
+ Microsoft.Private.CoreFx.NETCoreApp
+ Microsoft.NETCore.Platforms
+ Microsoft.NETCore.Targets
+ NETStandard.Library
+ Microsoft.NETCore.Runtime.CoreCLR
+ Microsoft.Bcl.Json.Sources
+ Microsoft.TargetingPack.Private.WinRT
+ Microsoft.SymbolUploader.Build.Task
+
+
diff --git a/eng/common/CIBuild.cmd b/eng/common/CIBuild.cmd
new file mode 100644
index 000000000..56c2f25ac
--- /dev/null
+++ b/eng/common/CIBuild.cmd
@@ -0,0 +1,2 @@
+@echo off
+powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0Build.ps1""" -restore -build -test -sign -pack -publish -ci %*"
\ No newline at end of file
diff --git a/eng/common/CheckSymbols.ps1 b/eng/common/CheckSymbols.ps1
new file mode 100644
index 000000000..b8d84607b
--- /dev/null
+++ b/eng/common/CheckSymbols.ps1
@@ -0,0 +1,158 @@
+param(
+ [Parameter(Mandatory=$true)][string] $InputPath, # Full path to directory where NuGet packages to be checked are stored
+ [Parameter(Mandatory=$true)][string] $ExtractPath, # Full path to directory where the packages will be extracted during validation
+ [Parameter(Mandatory=$true)][string] $SymbolToolPath # Full path to directory where dotnet symbol-tool was installed
+)
+
+Add-Type -AssemblyName System.IO.Compression.FileSystem
+
+function FirstMatchingSymbolDescriptionOrDefault {
+ param(
+ [string] $FullPath, # Full path to the module that has to be checked
+ [string] $TargetServerParam, # Parameter to pass to `Symbol Tool` indicating the server to lookup for symbols
+ [string] $SymbolsPath
+ )
+
+ $FileName = [System.IO.Path]::GetFileName($FullPath)
+ $Extension = [System.IO.Path]::GetExtension($FullPath)
+
+ # Those below are potential symbol files that the `dotnet symbol` might
+ # return. Which one will be returned depends on the type of file we are
+ # checking and which type of file was uploaded.
+
+ # The file itself is returned
+ $SymbolPath = $SymbolsPath + "\" + $FileName
+
+ # PDB file for the module
+ $PdbPath = $SymbolPath.Replace($Extension, ".pdb")
+
+ # PDB file for R2R module (created by crossgen)
+ $NGenPdb = $SymbolPath.Replace($Extension, ".ni.pdb")
+
+ # DBG file for a .so library
+ $SODbg = $SymbolPath.Replace($Extension, ".so.dbg")
+
+ # DWARF file for a .dylib
+ $DylibDwarf = $SymbolPath.Replace($Extension, ".dylib.dwarf")
+
+ .\dotnet-symbol.exe --symbols --modules --windows-pdbs $TargetServerParam $FullPath -o $SymbolsPath | Out-Null
+
+ if (Test-Path $PdbPath) {
+ return "PDB"
+ }
+ elseif (Test-Path $NGenPdb) {
+ return "NGen PDB"
+ }
+ elseif (Test-Path $SODbg) {
+ return "DBG for SO"
+ }
+ elseif (Test-Path $DylibDwarf) {
+ return "Dwarf for Dylib"
+ }
+ elseif (Test-Path $SymbolPath) {
+ return "Module"
+ }
+ else {
+ return $null
+ }
+}
+
+function CountMissingSymbols {
+ param(
+ [string] $PackagePath # Path to a NuGet package
+ )
+
+ # Ensure input file exists
+ if (!(Test-Path $PackagePath)) {
+ throw "Input file does not exist: $PackagePath"
+ }
+
+ # Extensions for which we'll look for symbols
+ $RelevantExtensions = @(".dll", ".exe", ".so", ".dylib")
+
+ # How many files are missing symbol information
+ $MissingSymbols = 0
+
+ $PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath)
+ $PackageGuid = New-Guid
+ $ExtractPath = Join-Path -Path $ExtractPath -ChildPath $PackageGuid
+ $SymbolsPath = Join-Path -Path $ExtractPath -ChildPath "Symbols"
+
+ [System.IO.Compression.ZipFile]::ExtractToDirectory($PackagePath, $ExtractPath)
+
+ # Make it easier to reference `symbol tool`
+ Push-Location $SymbolToolPath
+
+ Get-ChildItem -Recurse $ExtractPath |
+ Where-Object {$RelevantExtensions -contains $_.Extension} |
+ ForEach-Object {
+ if ($_.FullName -Match "\\ref\\") {
+ Write-Host "`t Ignoring reference assembly file" $_.FullName
+ return
+ }
+
+ $SymbolsOnMSDL = FirstMatchingSymbolDescriptionOrDefault $_.FullName "--microsoft-symbol-server" $SymbolsPath
+ $SymbolsOnSymWeb = FirstMatchingSymbolDescriptionOrDefault $_.FullName "--internal-server" $SymbolsPath
+
+ Write-Host -NoNewLine "`t Checking file" $_.FullName "... "
+
+ if ($SymbolsOnMSDL -ne $null -and $SymbolsOnSymWeb -ne $null) {
+ Write-Host "Symbols found on MSDL (" $SymbolsOnMSDL ") and SymWeb (" $SymbolsOnSymWeb ")"
+ }
+ else {
+ $MissingSymbols++
+
+ if ($SymbolsOnMSDL -eq $null -and $SymbolsOnSymWeb -eq $null) {
+ Write-Host "No symbols found on MSDL or SymWeb!"
+ }
+ else {
+ if ($SymbolsOnMSDL -eq $null) {
+ Write-Host "No symbols found on MSDL!"
+ }
+ else {
+ Write-Host "No symbols found on SymWeb!"
+ }
+ }
+ }
+ }
+
+ Pop-Location
+
+ return $MissingSymbols
+}
+
+function CheckSymbolsAvailable {
+ if (Test-Path $ExtractPath) {
+ Remove-Item $ExtractPath -Force -Recurse -ErrorAction SilentlyContinue
+ }
+
+ Get-ChildItem "$InputPath\*.nupkg" |
+ ForEach-Object {
+ $FileName = $_.Name
+
+ # These packages from Arcade-Services include some native libraries that
+ # our current symbol uploader can't handle. Below is a workaround until
+ # we get issue: https://github.com/dotnet/arcade/issues/2457 sorted.
+ if ($FileName -Match "Microsoft\.DotNet\.Darc\.") {
+ Write-Host "Ignoring Arcade-services file: $FileName"
+ Write-Host
+ return
+ }
+ elseif ($FileName -Match "Microsoft\.DotNet\.Maestro\.Tasks\.") {
+ Write-Host "Ignoring Arcade-services file: $FileName"
+ Write-Host
+ return
+ }
+
+ Write-Host "Validating $FileName "
+ $Status = CountMissingSymbols "$InputPath\$FileName"
+
+ if ($Status -ne 0) {
+ Write-Error "Missing symbols for $Status modules in the package $FileName"
+ }
+
+ Write-Host
+ }
+}
+
+CheckSymbolsAvailable
diff --git a/eng/common/PSScriptAnalyzerSettings.psd1 b/eng/common/PSScriptAnalyzerSettings.psd1
new file mode 100644
index 000000000..4c1ea7c98
--- /dev/null
+++ b/eng/common/PSScriptAnalyzerSettings.psd1
@@ -0,0 +1,11 @@
+@{
+ IncludeRules=@('PSAvoidUsingCmdletAliases',
+ 'PSAvoidUsingWMICmdlet',
+ 'PSAvoidUsingPositionalParameters',
+ 'PSAvoidUsingInvokeExpression',
+ 'PSUseDeclaredVarsMoreThanAssignments',
+ 'PSUseCmdletCorrectly',
+ 'PSStandardDSCFunctionsInResource',
+ 'PSUseIdenticalMandatoryParametersForDSC',
+ 'PSUseIdenticalParametersForDSC')
+}
\ No newline at end of file
diff --git a/eng/common/PublishToPackageFeed.proj b/eng/common/PublishToPackageFeed.proj
new file mode 100644
index 000000000..a1b133372
--- /dev/null
+++ b/eng/common/PublishToPackageFeed.proj
@@ -0,0 +1,83 @@
+
+
+
+
+
+ netcoreapp2.1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ https://dotnetfeed.blob.core.windows.net/dotnet-core/index.json
+ https://dotnetfeed.blob.core.windows.net/arcade-validation/index.json
+ https://dotnetfeed.blob.core.windows.net/aspnet-aspnetcore/index.json
+ https://dotnetfeed.blob.core.windows.net/aspnet-aspnetcore-tooling/index.json
+ https://dotnetfeed.blob.core.windows.net/aspnet-entityframeworkcore/index.json
+ https://dotnetfeed.blob.core.windows.net/aspnet-extensions/index.json
+ https://dotnetfeed.blob.core.windows.net/dotnet-coreclr/index.json
+ https://dotnetfeed.blob.core.windows.net/dotnet-sdk/index.json
+ https://dotnetfeed.blob.core.windows.net/dotnet-tools-internal/index.json
+ https://dotnetfeed.blob.core.windows.net/dotnet-toolset/index.json
+ https://dotnetfeed.blob.core.windows.net/dotnet-windowsdesktop/index.json
+ https://dotnetfeed.blob.core.windows.net/nuget-nugetclient/index.json
+ https://dotnetfeed.blob.core.windows.net/aspnet-entityframework6/index.json
+ https://dotnetfeed.blob.core.windows.net/aspnet-blazor/index.json
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/eng/common/PublishToSymbolServers.proj b/eng/common/PublishToSymbolServers.proj
new file mode 100644
index 000000000..5d55e312b
--- /dev/null
+++ b/eng/common/PublishToSymbolServers.proj
@@ -0,0 +1,82 @@
+
+
+
+
+
+ netcoreapp2.1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 3650
+ true
+ false
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/eng/common/README.md b/eng/common/README.md
new file mode 100644
index 000000000..ff49c3715
--- /dev/null
+++ b/eng/common/README.md
@@ -0,0 +1,28 @@
+# Don't touch this folder
+
+ uuuuuuuuuuuuuuuuuuuu
+ u" uuuuuuuuuuuuuuuuuu "u
+ u" u$$$$$$$$$$$$$$$$$$$$u "u
+ u" u$$$$$$$$$$$$$$$$$$$$$$$$u "u
+ u" u$$$$$$$$$$$$$$$$$$$$$$$$$$$$u "u
+ u" u$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$u "u
+ u" u$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$u "u
+ $ $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$ $
+ $ $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$ $
+ $ $$$" ... "$... ...$" ... "$$$ ... "$$$ $
+ $ $$$u `"$$$$$$$ $$$ $$$$$ $$ $$$ $$$ $
+ $ $$$$$$uu "$$$$ $$$ $$$$$ $$ """ u$$$ $
+ $ $$$""$$$ $$$$ $$$u "$$$" u$$ $$$$$$$$ $
+ $ $$$$....,$$$$$..$$$$$....,$$$$..$$$$$$$$ $
+ $ $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$ $
+ "u "$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$" u"
+ "u "$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$" u"
+ "u "$$$$$$$$$$$$$$$$$$$$$$$$$$$$" u"
+ "u "$$$$$$$$$$$$$$$$$$$$$$$$" u"
+ "u "$$$$$$$$$$$$$$$$$$$$" u"
+ "u """""""""""""""""" u"
+ """"""""""""""""""""
+
+!!! Changes made in this directory are subject to being overwritten by automation !!!
+
+The files in this directory are shared by all Arcade repos and managed by automation. If you need to make changes to these files, open an issue or submit a pull request to https://github.com/dotnet/arcade first.
diff --git a/eng/common/SigningValidation.proj b/eng/common/SigningValidation.proj
new file mode 100644
index 000000000..3d0ac80af
--- /dev/null
+++ b/eng/common/SigningValidation.proj
@@ -0,0 +1,83 @@
+
+
+
+
+
+ netcoreapp2.1
+
+
+
+
+
+
+
+ $(NuGetPackageRoot)Microsoft.DotNet.SignCheck\$(SignCheckVersion)\tools\Microsoft.DotNet.SignCheck.exe
+
+ $(PackageBasePath)
+ signcheck.log
+ signcheck.errors.log
+ signcheck.exclusions.txt
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/eng/common/SourceLinkValidation.ps1 b/eng/common/SourceLinkValidation.ps1
new file mode 100644
index 000000000..cb2d28cb9
--- /dev/null
+++ b/eng/common/SourceLinkValidation.ps1
@@ -0,0 +1,184 @@
+param(
+ [Parameter(Mandatory=$true)][string] $InputPath, # Full path to directory where Symbols.NuGet packages to be checked are stored
+ [Parameter(Mandatory=$true)][string] $ExtractPath, # Full path to directory where the packages will be extracted during validation
+ [Parameter(Mandatory=$true)][string] $SourceLinkToolPath, # Full path to directory where dotnet SourceLink CLI was installed
+ [Parameter(Mandatory=$true)][string] $GHRepoName, # GitHub name of the repo including the Org. E.g., dotnet/arcade
+ [Parameter(Mandatory=$true)][string] $GHCommit # GitHub commit SHA used to build the packages
+)
+
+# Cache/HashMap (File -> Exist flag) used to consult whether a file exist
+# in the repository at a specific commit point. This is populated by inserting
+# all files present in the repo at a specific commit point.
+$global:RepoFiles = @{}
+
+$ValidatePackage = {
+ param(
+ [string] $PackagePath # Full path to a Symbols.NuGet package
+ )
+
+ # Ensure input file exist
+ if (!(Test-Path $PackagePath)) {
+ throw "Input file does not exist: $PackagePath"
+ }
+
+ # Extensions for which we'll look for SourceLink information
+ # For now we'll only care about Portable & Embedded PDBs
+ $RelevantExtensions = @(".dll", ".exe", ".pdb")
+
+ Write-Host -NoNewLine "Validating" ([System.IO.Path]::GetFileName($PackagePath)) "... "
+
+ $PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath)
+ $ExtractPath = Join-Path -Path $using:ExtractPath -ChildPath $PackageId
+ $FailedFiles = 0
+
+ Add-Type -AssemblyName System.IO.Compression.FileSystem
+
+ [System.IO.Directory]::CreateDirectory($ExtractPath);
+
+ $zip = [System.IO.Compression.ZipFile]::OpenRead($PackagePath)
+
+ $zip.Entries |
+ Where-Object {$RelevantExtensions -contains [System.IO.Path]::GetExtension($_.Name)} |
+ ForEach-Object {
+ $FileName = $_.FullName
+ $Extension = [System.IO.Path]::GetExtension($_.Name)
+ $FakeName = -Join((New-Guid), $Extension)
+ $TargetFile = Join-Path -Path $ExtractPath -ChildPath $FakeName
+
+ # We ignore resource DLLs
+ if ($FileName.EndsWith(".resources.dll")) {
+ return
+ }
+
+ [System.IO.Compression.ZipFileExtensions]::ExtractToFile($_, $TargetFile, $true)
+
+ $ValidateFile = {
+ param(
+ [string] $FullPath, # Full path to the module that has to be checked
+ [string] $RealPath,
+ [ref] $FailedFiles
+ )
+
+ # Makes easier to reference `sourcelink cli`
+ Push-Location $using:SourceLinkToolPath
+
+ $SourceLinkInfos = .\sourcelink.exe print-urls $FullPath | Out-String
+
+ if ($LASTEXITCODE -eq 0 -and -not ([string]::IsNullOrEmpty($SourceLinkInfos))) {
+ $NumFailedLinks = 0
+
+ # We only care about Http addresses
+ $Matches = (Select-String '(http[s]?)(:\/\/)([^\s,]+)' -Input $SourceLinkInfos -AllMatches).Matches
+
+ if ($Matches.Count -ne 0) {
+ $Matches.Value |
+ ForEach-Object {
+ $Link = $_
+ $CommitUrl = -Join("https://raw.githubusercontent.com/", $using:GHRepoName, "/", $using:GHCommit, "/")
+ $FilePath = $Link.Replace($CommitUrl, "")
+ $Status = 200
+ $Cache = $using:RepoFiles
+
+ if ( !($Cache.ContainsKey($FilePath)) ) {
+ try {
+ $Uri = $Link -as [System.URI]
+
+ # Only GitHub links are valid
+ if ($Uri.AbsoluteURI -ne $null -and $Uri.Host -match "github") {
+ $Status = (Invoke-WebRequest -Uri $Link -UseBasicParsing -Method HEAD -TimeoutSec 5).StatusCode
+ }
+ else {
+ $Status = 0
+ }
+ }
+ catch {
+ $Status = 0
+ }
+ }
+
+ if ($Status -ne 200) {
+ if ($NumFailedLinks -eq 0) {
+ if ($FailedFiles.Value -eq 0) {
+ Write-Host
+ }
+
+ Write-Host "`tFile $RealPath has broken links:"
+ }
+
+ Write-Host "`t`tFailed to retrieve $Link"
+
+ $NumFailedLinks++
+ }
+ }
+ }
+
+ if ($NumFailedLinks -ne 0) {
+ $FailedFiles.value++
+ $global:LASTEXITCODE = 1
+ }
+ }
+
+ Pop-Location
+ }
+
+ &$ValidateFile $TargetFile $FileName ([ref]$FailedFiles)
+ }
+
+ $zip.Dispose()
+
+ if ($FailedFiles -eq 0) {
+ Write-Host "Passed."
+ }
+}
+
+function ValidateSourceLinkLinks {
+ if (!($GHRepoName -Match "^[^\s\/]+/[^\s\/]+$")) {
+ Write-Host "GHRepoName should be in the format <org>/<repo>"
+ $global:LASTEXITCODE = 1
+ return
+ }
+
+ if (!($GHCommit -Match "^[0-9a-fA-F]{40}$")) {
+ Write-Host "GHCommit should be a 40 chars hexadecimal string"
+ $global:LASTEXITCODE = 1
+ return
+ }
+
+ $RepoTreeURL = -Join("https://api.github.com/repos/", $GHRepoName, "/git/trees/", $GHCommit, "?recursive=1")
+ $CodeExtensions = @(".cs", ".vb", ".fs", ".fsi", ".fsx", ".fsscript")
+
+ try {
+ # Retrieve the list of files in the repo at that particular commit point and store them in the RepoFiles hash
+ $Data = Invoke-WebRequest $RepoTreeURL | ConvertFrom-Json | Select-Object -ExpandProperty tree
+
+ foreach ($file in $Data) {
+ $Extension = [System.IO.Path]::GetExtension($file.path)
+
+ if ($CodeExtensions.Contains($Extension)) {
+ $RepoFiles[$file.path] = 1
+ }
+ }
+ }
+ catch {
+ Write-Host "Problems downloading the list of files from the repo. Url used: $RepoTreeURL"
+ $global:LASTEXITCODE = 1
+ return
+ }
+
+ if (Test-Path $ExtractPath) {
+ Remove-Item $ExtractPath -Force -Recurse -ErrorAction SilentlyContinue
+ }
+
+ # Process each NuGet package in parallel
+ $Jobs = @()
+ Get-ChildItem "$InputPath\*.symbols.nupkg" |
+ ForEach-Object {
+ $Jobs += Start-Job -ScriptBlock $ValidatePackage -ArgumentList $_.FullName
+ }
+
+ foreach ($Job in $Jobs) {
+ Wait-Job -Id $Job.Id | Receive-Job
+ }
+}
+
+Measure-Command { ValidateSourceLinkLinks }
diff --git a/eng/common/build.ps1 b/eng/common/build.ps1
new file mode 100644
index 000000000..e001ccb48
--- /dev/null
+++ b/eng/common/build.ps1
@@ -0,0 +1,145 @@
+[CmdletBinding(PositionalBinding=$false)]
+Param(
+ [string][Alias('c')]$configuration = "Debug",
+ [string]$platform = $null,
+ [string] $projects,
+ [string][Alias('v')]$verbosity = "minimal",
+ [string] $msbuildEngine = $null,
+ [bool] $warnAsError = $true,
+ [bool] $nodeReuse = $true,
+ [switch][Alias('r')]$restore,
+ [switch] $deployDeps,
+ [switch][Alias('b')]$build,
+ [switch] $rebuild,
+ [switch] $deploy,
+ [switch][Alias('t')]$test,
+ [switch] $integrationTest,
+ [switch] $performanceTest,
+ [switch] $sign,
+ [switch] $pack,
+ [switch] $publish,
+ [switch][Alias('bl')]$binaryLog,
+ [switch] $ci,
+ [switch] $prepareMachine,
+ [switch] $help,
+ [Parameter(ValueFromRemainingArguments=$true)][String[]]$properties
+)
+
+. $PSScriptRoot\tools.ps1
+
+function Print-Usage() {
+ Write-Host "Common settings:"
+ Write-Host " -configuration Build configuration: 'Debug' or 'Release' (short: -c)"
+ Write-Host " -platform Platform configuration: 'x86', 'x64' or any valid Platform value to pass to msbuild"
+ Write-Host " -verbosity Msbuild verbosity: q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic] (short: -v)"
+ Write-Host " -binaryLog Output binary log (short: -bl)"
+ Write-Host " -help Print help and exit"
+ Write-Host ""
+
+ Write-Host "Actions:"
+ Write-Host " -restore Restore dependencies (short: -r)"
+ Write-Host " -build Build solution (short: -b)"
+ Write-Host " -rebuild Rebuild solution"
+ Write-Host " -deploy Deploy built VSIXes"
+ Write-Host " -deployDeps Deploy dependencies (e.g. VSIXes for integration tests)"
+ Write-Host " -test Run all unit tests in the solution (short: -t)"
+ Write-Host " -integrationTest Run all integration tests in the solution"
+ Write-Host " -performanceTest Run all performance tests in the solution"
+ Write-Host " -pack Package build outputs into NuGet packages and Willow components"
+ Write-Host " -sign Sign build outputs"
+ Write-Host " -publish Publish artifacts (e.g. symbols)"
+ Write-Host ""
+
+ Write-Host "Advanced settings:"
+ Write-Host " -projects Semi-colon delimited list of sln/proj's to build. Globbing is supported (*.sln)"
+ Write-Host " -ci Set when running on CI server"
+ Write-Host " -prepareMachine Prepare machine for CI run, clean up processes after build"
+ Write-Host " -warnAsError Sets warnaserror msbuild parameter ('true' or 'false')"
+ Write-Host " -msbuildEngine Msbuild engine to use to run build ('dotnet', 'vs', or unspecified)."
+ Write-Host ""
+
+ Write-Host "Command line arguments not listed above are passed thru to msbuild."
+ Write-Host "The above arguments can be shortened as much as to be unambiguous (e.g. -co for configuration, -t for test, etc.)."
+}
+
+function InitializeCustomToolset {
+ if (-not $restore) {
+ return
+ }
+
+ $script = Join-Path $EngRoot "restore-toolset.ps1"
+
+ if (Test-Path $script) {
+ . $script
+ }
+}
+
+function Build {
+ $toolsetBuildProj = InitializeToolset
+ InitializeCustomToolset
+
+ $bl = if ($binaryLog) { "/bl:" + (Join-Path $LogDir "Build.binlog") } else { "" }
+ $platformArg = if ($platform) { "/p:Platform=$platform" } else { "" }
+
+ if ($projects) {
+ # Re-assign properties to a new variable because PowerShell doesn't let us append properties directly for unclear reasons.
+ # Explicitly set the type as string[] because otherwise PowerShell would make this char[] if $properties is empty.
+ [string[]] $msbuildArgs = $properties
+
+ # Resolve relative project paths into full paths
+ $projects = ($projects.Split(';').ForEach({Resolve-Path $_}) -join ';')
+
+ $msbuildArgs += "/p:Projects=$projects"
+ $properties = $msbuildArgs
+ }
+
+ MSBuild $toolsetBuildProj `
+ $bl `
+ $platformArg `
+ /p:Configuration=$configuration `
+ /p:RepoRoot=$RepoRoot `
+ /p:Restore=$restore `
+ /p:DeployDeps=$deployDeps `
+ /p:Build=$build `
+ /p:Rebuild=$rebuild `
+ /p:Deploy=$deploy `
+ /p:Test=$test `
+ /p:Pack=$pack `
+ /p:IntegrationTest=$integrationTest `
+ /p:PerformanceTest=$performanceTest `
+ /p:Sign=$sign `
+ /p:Publish=$publish `
+ @properties
+}
+
+try {
+ if ($help -or (($null -ne $properties) -and ($properties.Contains("/help") -or $properties.Contains("/?")))) {
+ Print-Usage
+ exit 0
+ }
+
+ if ($ci) {
+ $binaryLog = $true
+ $nodeReuse = $false
+ }
+
+ # Import custom tools configuration, if present in the repo.
+ # Note: Import in global scope so that the script set top-level variables without qualification.
+ $configureToolsetScript = Join-Path $EngRoot "configure-toolset.ps1"
+ if (Test-Path $configureToolsetScript) {
+ . $configureToolsetScript
+ }
+
+ if (($restore) -and ($null -eq $env:DisableNativeToolsetInstalls)) {
+ InitializeNativeTools
+ }
+
+ Build
+}
+catch {
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Category "InitializeToolset" -Message $_
+ ExitWithExitCode 1
+}
+
+ExitWithExitCode 0
diff --git a/eng/common/build.sh b/eng/common/build.sh
new file mode 100644
index 000000000..6236fc4d3
--- /dev/null
+++ b/eng/common/build.sh
@@ -0,0 +1,216 @@
+#!/usr/bin/env bash
+
+# Stop script if unbound variable found (use ${var:-} if intentional)
+set -u
+
+# Stop script if command returns non-zero exit code.
+# Prevents hidden errors caused by missing error code propagation.
+set -e
+
+usage()
+{
+ echo "Common settings:"
+ echo " --configuration Build configuration: 'Debug' or 'Release' (short: -c)"
+ echo " --verbosity Msbuild verbosity: q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic] (short: -v)"
+ echo " --binaryLog Create MSBuild binary log (short: -bl)"
+ echo " --help Print help and exit (short: -h)"
+ echo ""
+
+ echo "Actions:"
+ echo " --restore Restore dependencies (short: -r)"
+ echo " --build Build solution (short: -b)"
+ echo " --rebuild Rebuild solution"
+ echo " --test Run all unit tests in the solution (short: -t)"
+ echo " --integrationTest Run all integration tests in the solution"
+ echo " --performanceTest Run all performance tests in the solution"
+ echo " --pack Package build outputs into NuGet packages and Willow components"
+ echo " --sign Sign build outputs"
+ echo " --publish Publish artifacts (e.g. symbols)"
+ echo ""
+
+ echo "Advanced settings:"
+ echo " --projects Project or solution file(s) to build"
+ echo " --ci Set when running on CI server"
+ echo " --prepareMachine Prepare machine for CI run, clean up processes after build"
+ echo " --nodeReuse Sets nodereuse msbuild parameter ('true' or 'false')"
+ echo " --warnAsError Sets warnaserror msbuild parameter ('true' or 'false')"
+ echo ""
+ echo "Command line arguments not listed above are passed thru to msbuild."
+ echo "Arguments can also be passed in with a single hyphen."
+}
+
+source="${BASH_SOURCE[0]}"
+
+# resolve $source until the file is no longer a symlink
+while [[ -h "$source" ]]; do
+ scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+ source="$(readlink "$source")"
+ # if $source was a relative symlink, we need to resolve it relative to the path where the
+ # symlink file was located
+ [[ $source != /* ]] && source="$scriptroot/$source"
+done
+scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+
+restore=false
+build=false
+rebuild=false
+test=false
+integration_test=false
+performance_test=false
+pack=false
+publish=false
+sign=false
+public=false
+ci=false
+
+warn_as_error=true
+node_reuse=true
+binary_log=false
+pipelines_log=false
+
+projects=''
+configuration='Debug'
+prepare_machine=false
+verbosity='minimal'
+
+properties=''
+
+while [[ $# > 0 ]]; do
+ opt="$(echo "${1/#--/-}" | awk '{print tolower($0)}')"
+ case "$opt" in
+ -help|-h)
+ usage
+ exit 0
+ ;;
+ -configuration|-c)
+ configuration=$2
+ shift
+ ;;
+ -verbosity|-v)
+ verbosity=$2
+ shift
+ ;;
+ -binarylog|-bl)
+ binary_log=true
+ ;;
+ -pipelineslog|-pl)
+ pipelines_log=true
+ ;;
+ -restore|-r)
+ restore=true
+ ;;
+ -build|-b)
+ build=true
+ ;;
+ -rebuild)
+ rebuild=true
+ ;;
+ -pack)
+ pack=true
+ ;;
+ -test|-t)
+ test=true
+ ;;
+ -integrationtest)
+ integration_test=true
+ ;;
+ -performancetest)
+ performance_test=true
+ ;;
+ -sign)
+ sign=true
+ ;;
+ -publish)
+ publish=true
+ ;;
+ -preparemachine)
+ prepare_machine=true
+ ;;
+ -projects)
+ projects=$2
+ shift
+ ;;
+ -ci)
+ ci=true
+ ;;
+ -warnaserror)
+ warn_as_error=$2
+ shift
+ ;;
+ -nodereuse)
+ node_reuse=$2
+ shift
+ ;;
+ *)
+ properties="$properties $1"
+ ;;
+ esac
+
+ shift
+done
+
+if [[ "$ci" == true ]]; then
+ pipelines_log=true
+ binary_log=true
+ node_reuse=false
+fi
+
+. "$scriptroot/tools.sh"
+
+function InitializeCustomToolset {
+ local script="$eng_root/restore-toolset.sh"
+
+ if [[ -a "$script" ]]; then
+ . "$script"
+ fi
+}
+
+function Build {
+ InitializeToolset
+ InitializeCustomToolset
+
+ if [[ ! -z "$projects" ]]; then
+ properties="$properties /p:Projects=$projects"
+ fi
+
+ local bl=""
+ if [[ "$binary_log" == true ]]; then
+ bl="/bl:\"$log_dir/Build.binlog\""
+ fi
+
+ MSBuild $_InitializeToolset \
+ $bl \
+ /p:Configuration=$configuration \
+ /p:RepoRoot="$repo_root" \
+ /p:Restore=$restore \
+ /p:Build=$build \
+ /p:Rebuild=$rebuild \
+ /p:Test=$test \
+ /p:Pack=$pack \
+ /p:IntegrationTest=$integration_test \
+ /p:PerformanceTest=$performance_test \
+ /p:Sign=$sign \
+ /p:Publish=$publish \
+ $properties
+
+ ExitWithExitCode 0
+}
+
+# Import custom tools configuration, if present in the repo.
+configure_toolset_script="$eng_root/configure-toolset.sh"
+if [[ -a "$configure_toolset_script" ]]; then
+ . "$configure_toolset_script"
+fi
+
+# TODO: https://github.com/dotnet/arcade/issues/1468
+# Temporary workaround to avoid breaking change.
+# Remove once repos are updated.
+if [[ -n "${useInstalledDotNetCli:-}" ]]; then
+ use_installed_dotnet_cli="$useInstalledDotNetCli"
+fi
+
+if [[ "$restore" == true && -z ${DisableNativeToolsetInstalls:-} ]]; then
+ InitializeNativeTools
+fi
+
+Build
diff --git a/eng/common/cibuild.sh b/eng/common/cibuild.sh
new file mode 100644
index 000000000..1a02c0dec
--- /dev/null
+++ b/eng/common/cibuild.sh
@@ -0,0 +1,16 @@
+#!/usr/bin/env bash
+
+source="${BASH_SOURCE[0]}"
+
+# resolve $SOURCE until the file is no longer a symlink
+while [[ -h $source ]]; do
+ scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+ source="$(readlink "$source")"
+
+ # if $source was a relative symlink, we need to resolve it relative to the path where
+ # the symlink file was located
+ [[ $source != /* ]] && source="$scriptroot/$source"
+done
+scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+
+. "$scriptroot/build.sh" --restore --build --test --pack --publish --ci $@
\ No newline at end of file
diff --git a/eng/common/cross/android/arm/toolchain.cmake b/eng/common/cross/android/arm/toolchain.cmake
new file mode 100644
index 000000000..a7e1c7350
--- /dev/null
+++ b/eng/common/cross/android/arm/toolchain.cmake
@@ -0,0 +1,41 @@
+set(CROSS_NDK_TOOLCHAIN $ENV{ROOTFS_DIR}/../)
+set(CROSS_ROOTFS ${CROSS_NDK_TOOLCHAIN}/sysroot)
+set(CLR_CMAKE_PLATFORM_ANDROID "Android")
+
+set(CMAKE_SYSTEM_NAME Linux)
+set(CMAKE_SYSTEM_VERSION 1)
+set(CMAKE_SYSTEM_PROCESSOR arm)
+
+## Specify the toolchain
+set(TOOLCHAIN "arm-linux-androideabi")
+set(CMAKE_PREFIX_PATH ${CROSS_NDK_TOOLCHAIN})
+set(TOOLCHAIN_PREFIX ${TOOLCHAIN}-)
+
+find_program(CMAKE_C_COMPILER ${TOOLCHAIN_PREFIX}clang)
+find_program(CMAKE_CXX_COMPILER ${TOOLCHAIN_PREFIX}clang++)
+find_program(CMAKE_ASM_COMPILER ${TOOLCHAIN_PREFIX}clang)
+find_program(CMAKE_AR ${TOOLCHAIN_PREFIX}ar)
+find_program(CMAKE_LD ${TOOLCHAIN_PREFIX}ar)
+find_program(CMAKE_OBJCOPY ${TOOLCHAIN_PREFIX}objcopy)
+find_program(CMAKE_OBJDUMP ${TOOLCHAIN_PREFIX}objdump)
+
+add_compile_options(--sysroot=${CROSS_ROOTFS})
+add_compile_options(-fPIE)
+add_compile_options(-mfloat-abi=soft)
+include_directories(SYSTEM ${CROSS_NDK_TOOLCHAIN}/include/c++/4.9.x/)
+include_directories(SYSTEM ${CROSS_NDK_TOOLCHAIN}/include/c++/4.9.x/arm-linux-androideabi/)
+
+set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} -B ${CROSS_ROOTFS}/usr/lib/gcc/${TOOLCHAIN}")
+set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} -L${CROSS_ROOTFS}/lib/${TOOLCHAIN}")
+set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} --sysroot=${CROSS_ROOTFS}")
+set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} -fPIE -pie")
+
+set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${CROSS_LINK_FLAGS}" CACHE STRING "" FORCE)
+set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} ${CROSS_LINK_FLAGS}" CACHE STRING "" FORCE)
+set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} ${CROSS_LINK_FLAGS}" CACHE STRING "" FORCE)
+
+set(CMAKE_FIND_ROOT_PATH "${CROSS_ROOTFS}")
+set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
+set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
+set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
+set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY)
diff --git a/eng/common/cross/android/arm64/toolchain.cmake b/eng/common/cross/android/arm64/toolchain.cmake
new file mode 100644
index 000000000..29415899c
--- /dev/null
+++ b/eng/common/cross/android/arm64/toolchain.cmake
@@ -0,0 +1,42 @@
+set(CROSS_NDK_TOOLCHAIN $ENV{ROOTFS_DIR}/../)
+set(CROSS_ROOTFS ${CROSS_NDK_TOOLCHAIN}/sysroot)
+set(CLR_CMAKE_PLATFORM_ANDROID "Android")
+
+set(CMAKE_SYSTEM_NAME Linux)
+set(CMAKE_SYSTEM_VERSION 1)
+set(CMAKE_SYSTEM_PROCESSOR aarch64)
+
+## Specify the toolchain
+set(TOOLCHAIN "aarch64-linux-android")
+set(CMAKE_PREFIX_PATH ${CROSS_NDK_TOOLCHAIN})
+set(TOOLCHAIN_PREFIX ${TOOLCHAIN}-)
+
+find_program(CMAKE_C_COMPILER ${TOOLCHAIN_PREFIX}clang)
+find_program(CMAKE_CXX_COMPILER ${TOOLCHAIN_PREFIX}clang++)
+find_program(CMAKE_ASM_COMPILER ${TOOLCHAIN_PREFIX}clang)
+find_program(CMAKE_AR ${TOOLCHAIN_PREFIX}ar)
+find_program(CMAKE_LD ${TOOLCHAIN_PREFIX}ar)
+find_program(CMAKE_OBJCOPY ${TOOLCHAIN_PREFIX}objcopy)
+find_program(CMAKE_OBJDUMP ${TOOLCHAIN_PREFIX}objdump)
+
+add_compile_options(--sysroot=${CROSS_ROOTFS})
+add_compile_options(-fPIE)
+
+## Needed for Android or bionic specific conditionals
+add_compile_options(-D__ANDROID__)
+add_compile_options(-D__BIONIC__)
+
+set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} -B ${CROSS_ROOTFS}/usr/lib/gcc/${TOOLCHAIN}")
+set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} -L${CROSS_ROOTFS}/lib/${TOOLCHAIN}")
+set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} --sysroot=${CROSS_ROOTFS}")
+set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} -fPIE -pie")
+
+set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${CROSS_LINK_FLAGS}" CACHE STRING "" FORCE)
+set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} ${CROSS_LINK_FLAGS}" CACHE STRING "" FORCE)
+set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} ${CROSS_LINK_FLAGS}" CACHE STRING "" FORCE)
+
+set(CMAKE_FIND_ROOT_PATH "${CROSS_ROOTFS}")
+set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
+set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
+set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
+set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY)
diff --git a/eng/common/cross/arm/sources.list.bionic b/eng/common/cross/arm/sources.list.bionic
new file mode 100644
index 000000000..210955740
--- /dev/null
+++ b/eng/common/cross/arm/sources.list.bionic
@@ -0,0 +1,11 @@
+deb http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
+deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
+
+deb http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
+deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
diff --git a/eng/common/cross/arm/sources.list.jessie b/eng/common/cross/arm/sources.list.jessie
new file mode 100644
index 000000000..4d142ac9b
--- /dev/null
+++ b/eng/common/cross/arm/sources.list.jessie
@@ -0,0 +1,3 @@
+# Debian (sid) # UNSTABLE
+deb http://ftp.debian.org/debian/ sid main contrib non-free
+deb-src http://ftp.debian.org/debian/ sid main contrib non-free
diff --git a/eng/common/cross/arm/sources.list.trusty b/eng/common/cross/arm/sources.list.trusty
new file mode 100644
index 000000000..07d8f88d8
--- /dev/null
+++ b/eng/common/cross/arm/sources.list.trusty
@@ -0,0 +1,11 @@
+deb http://ports.ubuntu.com/ubuntu-ports/ trusty main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ trusty-updates main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-updates main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ trusty-backports main restricted
+deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-backports main restricted
+
+deb http://ports.ubuntu.com/ubuntu-ports/ trusty-security main restricted universe multiverse
+deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-security main restricted universe multiverse
\ No newline at end of file
diff --git a/eng/common/cross/arm/sources.list.xenial b/eng/common/cross/arm/sources.list.xenial
new file mode 100644
index 000000000..eacd86b7d
--- /dev/null
+++ b/eng/common/cross/arm/sources.list.xenial
@@ -0,0 +1,11 @@
+deb http://ports.ubuntu.com/ubuntu-ports/ xenial main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ xenial-updates main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-updates main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted
+deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted
+
+deb http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse
+deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse
\ No newline at end of file
diff --git a/eng/common/cross/arm/sources.list.zesty b/eng/common/cross/arm/sources.list.zesty
new file mode 100644
index 000000000..ea2c14a78
--- /dev/null
+++ b/eng/common/cross/arm/sources.list.zesty
@@ -0,0 +1,11 @@
+deb http://ports.ubuntu.com/ubuntu-ports/ zesty main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ zesty-updates main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-updates main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ zesty-backports main restricted
+deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-backports main restricted
+
+deb http://ports.ubuntu.com/ubuntu-ports/ zesty-security main restricted universe multiverse
+deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-security main restricted universe multiverse
diff --git a/eng/common/cross/arm/trusty-lttng-2.4.patch b/eng/common/cross/arm/trusty-lttng-2.4.patch
new file mode 100644
index 000000000..8e4dd7ae7
--- /dev/null
+++ b/eng/common/cross/arm/trusty-lttng-2.4.patch
@@ -0,0 +1,71 @@
+From e72c9d7ead60e3317bd6d1fade995c07021c947b Mon Sep 17 00:00:00 2001
+From: Mathieu Desnoyers
+Date: Thu, 7 May 2015 13:25:04 -0400
+Subject: [PATCH] Fix: building probe providers with C++ compiler
+
+Robert Daniels wrote:
+> > I'm attempting to use lttng userspace tracing with a C++ application
+> > on an ARM platform. I'm using GCC 4.8.4 on Linux 3.14 with the 2.6
+> > release of lttng. I've compiled lttng-modules, lttng-ust, and
+> > lttng-tools and have been able to get a simple test working with C
+> > code. When I attempt to run the hello.cxx test on my target it will
+> > segfault.
+>
+>
+> I spent a little time digging into this issue and finally discovered the
+> cause of my segfault with ARM C++ tracepoints.
+>
+> There is a struct called 'lttng_event' in ust-events.h which contains an
+> empty union 'u'. This was the cause of my issue. Under C, this empty union
+> compiles to a zero byte member while under C++ it compiles to a one byte
+> member, and in my case was four-byte aligned which caused my C++ code to
+> have the 'cds_list_head node' offset incorrectly by four bytes. This lead
+> to an incorrect linked list structure which caused my issue.
+>
+> Since this union is empty, I simply removed it from the struct and everything
+> worked correctly.
+>
+> I don't know the history or purpose behind this empty union so I'd like to
+> know if this is a safe fix. If it is I can submit a patch with the union
+> removed.
+
+That's a very nice catch!
+
+We do not support building tracepoint probe provider with
+g++ yet, as stated in lttng-ust(3):
+
+"- Note for C++ support: although an application instrumented with
+ tracepoints can be compiled with g++, tracepoint probes should be
+ compiled with gcc (only tested with gcc so far)."
+
+However, if it works fine with this fix, then I'm tempted to take it,
+especially because removing the empty union does not appear to affect
+the layout of struct lttng_event as seen from liblttng-ust, which must
+be compiled with a C compiler, and from probe providers compiled with
+a C compiler. So all we are changing is the layout of a probe provider
+compiled with a C++ compiler, which is anyway buggy at the moment,
+because it is not compatible with the layout expected by liblttng-ust
+compiled with a C compiler.
+
+Reported-by: Robert Daniels
+Signed-off-by: Mathieu Desnoyers
+---
+ include/lttng/ust-events.h | 2 --
+ 1 file changed, 2 deletions(-)
+
+diff --git a/usr/include/lttng/ust-events.h b/usr/include/lttng/ust-events.h
+index 328a875..3d7a274 100644
+--- a/usr/include/lttng/ust-events.h
++++ b/usr/include/lttng/ust-events.h
+@@ -407,8 +407,6 @@ struct lttng_event {
+ void *_deprecated1;
+ struct lttng_ctx *ctx;
+ enum lttng_ust_instrumentation instrumentation;
+- union {
+- } u;
+ struct cds_list_head node; /* Event list in session */
+ struct cds_list_head _deprecated2;
+ void *_deprecated3;
+--
+2.7.4
+
diff --git a/eng/common/cross/arm/trusty.patch b/eng/common/cross/arm/trusty.patch
new file mode 100644
index 000000000..2f2972f8e
--- /dev/null
+++ b/eng/common/cross/arm/trusty.patch
@@ -0,0 +1,97 @@
+diff -u -r a/usr/include/urcu/uatomic/generic.h b/usr/include/urcu/uatomic/generic.h
+--- a/usr/include/urcu/uatomic/generic.h 2014-03-28 06:04:42.000000000 +0900
++++ b/usr/include/urcu/uatomic/generic.h 2017-02-13 10:35:21.189927116 +0900
+@@ -65,17 +65,17 @@
+ switch (len) {
+ #ifdef UATOMIC_HAS_ATOMIC_BYTE
+ case 1:
+- return __sync_val_compare_and_swap_1(addr, old, _new);
++ return __sync_val_compare_and_swap_1((uint8_t *) addr, old, _new);
+ #endif
+ #ifdef UATOMIC_HAS_ATOMIC_SHORT
+ case 2:
+- return __sync_val_compare_and_swap_2(addr, old, _new);
++ return __sync_val_compare_and_swap_2((uint16_t *) addr, old, _new);
+ #endif
+ case 4:
+- return __sync_val_compare_and_swap_4(addr, old, _new);
++ return __sync_val_compare_and_swap_4((uint32_t *) addr, old, _new);
+ #if (CAA_BITS_PER_LONG == 64)
+ case 8:
+- return __sync_val_compare_and_swap_8(addr, old, _new);
++ return __sync_val_compare_and_swap_8((uint64_t *) addr, old, _new);
+ #endif
+ }
+ _uatomic_link_error();
+@@ -100,20 +100,20 @@
+ switch (len) {
+ #ifdef UATOMIC_HAS_ATOMIC_BYTE
+ case 1:
+- __sync_and_and_fetch_1(addr, val);
++ __sync_and_and_fetch_1((uint8_t *) addr, val);
+ return;
+ #endif
+ #ifdef UATOMIC_HAS_ATOMIC_SHORT
+ case 2:
+- __sync_and_and_fetch_2(addr, val);
++ __sync_and_and_fetch_2((uint16_t *) addr, val);
+ return;
+ #endif
+ case 4:
+- __sync_and_and_fetch_4(addr, val);
++ __sync_and_and_fetch_4((uint32_t *) addr, val);
+ return;
+ #if (CAA_BITS_PER_LONG == 64)
+ case 8:
+- __sync_and_and_fetch_8(addr, val);
++ __sync_and_and_fetch_8((uint64_t *) addr, val);
+ return;
+ #endif
+ }
+@@ -139,20 +139,20 @@
+ switch (len) {
+ #ifdef UATOMIC_HAS_ATOMIC_BYTE
+ case 1:
+- __sync_or_and_fetch_1(addr, val);
++ __sync_or_and_fetch_1((uint8_t *) addr, val);
+ return;
+ #endif
+ #ifdef UATOMIC_HAS_ATOMIC_SHORT
+ case 2:
+- __sync_or_and_fetch_2(addr, val);
++ __sync_or_and_fetch_2((uint16_t *) addr, val);
+ return;
+ #endif
+ case 4:
+- __sync_or_and_fetch_4(addr, val);
++ __sync_or_and_fetch_4((uint32_t *) addr, val);
+ return;
+ #if (CAA_BITS_PER_LONG == 64)
+ case 8:
+- __sync_or_and_fetch_8(addr, val);
++ __sync_or_and_fetch_8((uint64_t *) addr, val);
+ return;
+ #endif
+ }
+@@ -180,17 +180,17 @@
+ switch (len) {
+ #ifdef UATOMIC_HAS_ATOMIC_BYTE
+ case 1:
+- return __sync_add_and_fetch_1(addr, val);
++ return __sync_add_and_fetch_1((uint8_t *) addr, val);
+ #endif
+ #ifdef UATOMIC_HAS_ATOMIC_SHORT
+ case 2:
+- return __sync_add_and_fetch_2(addr, val);
++ return __sync_add_and_fetch_2((uint16_t *) addr, val);
+ #endif
+ case 4:
+- return __sync_add_and_fetch_4(addr, val);
++ return __sync_add_and_fetch_4((uint32_t *) addr, val);
+ #if (CAA_BITS_PER_LONG == 64)
+ case 8:
+- return __sync_add_and_fetch_8(addr, val);
++ return __sync_add_and_fetch_8((uint64_t *) addr, val);
+ #endif
+ }
+ _uatomic_link_error();
diff --git a/eng/common/cross/arm64/sources.list.bionic b/eng/common/cross/arm64/sources.list.bionic
new file mode 100644
index 000000000..210955740
--- /dev/null
+++ b/eng/common/cross/arm64/sources.list.bionic
@@ -0,0 +1,11 @@
+deb http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
+deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
+
+deb http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
+deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
diff --git a/eng/common/cross/arm64/sources.list.buster b/eng/common/cross/arm64/sources.list.buster
new file mode 100644
index 000000000..7194ac64a
--- /dev/null
+++ b/eng/common/cross/arm64/sources.list.buster
@@ -0,0 +1,11 @@
+deb http://deb.debian.org/debian buster main
+deb-src http://deb.debian.org/debian buster main
+
+deb http://deb.debian.org/debian-security/ buster/updates main
+deb-src http://deb.debian.org/debian-security/ buster/updates main
+
+deb http://deb.debian.org/debian buster-updates main
+deb-src http://deb.debian.org/debian buster-updates main
+
+deb http://deb.debian.org/debian buster-backports main contrib non-free
+deb-src http://deb.debian.org/debian buster-backports main contrib non-free
diff --git a/eng/common/cross/arm64/sources.list.stretch b/eng/common/cross/arm64/sources.list.stretch
new file mode 100644
index 000000000..0e1215774
--- /dev/null
+++ b/eng/common/cross/arm64/sources.list.stretch
@@ -0,0 +1,12 @@
+deb http://deb.debian.org/debian stretch main
+deb-src http://deb.debian.org/debian stretch main
+
+deb http://deb.debian.org/debian-security/ stretch/updates main
+deb-src http://deb.debian.org/debian-security/ stretch/updates main
+
+deb http://deb.debian.org/debian stretch-updates main
+deb-src http://deb.debian.org/debian stretch-updates main
+
+deb http://deb.debian.org/debian stretch-backports main contrib non-free
+deb-src http://deb.debian.org/debian stretch-backports main contrib non-free
+
diff --git a/eng/common/cross/arm64/sources.list.trusty b/eng/common/cross/arm64/sources.list.trusty
new file mode 100644
index 000000000..07d8f88d8
--- /dev/null
+++ b/eng/common/cross/arm64/sources.list.trusty
@@ -0,0 +1,11 @@
+deb http://ports.ubuntu.com/ubuntu-ports/ trusty main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ trusty-updates main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-updates main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ trusty-backports main restricted
+deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-backports main restricted
+
+deb http://ports.ubuntu.com/ubuntu-ports/ trusty-security main restricted universe multiverse
+deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-security main restricted universe multiverse
\ No newline at end of file
diff --git a/eng/common/cross/arm64/sources.list.xenial b/eng/common/cross/arm64/sources.list.xenial
new file mode 100644
index 000000000..eacd86b7d
--- /dev/null
+++ b/eng/common/cross/arm64/sources.list.xenial
@@ -0,0 +1,11 @@
+deb http://ports.ubuntu.com/ubuntu-ports/ xenial main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ xenial-updates main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-updates main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted
+deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted
+
+deb http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse
+deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse
\ No newline at end of file
diff --git a/eng/common/cross/arm64/sources.list.zesty b/eng/common/cross/arm64/sources.list.zesty
new file mode 100644
index 000000000..ea2c14a78
--- /dev/null
+++ b/eng/common/cross/arm64/sources.list.zesty
@@ -0,0 +1,11 @@
+deb http://ports.ubuntu.com/ubuntu-ports/ zesty main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ zesty-updates main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-updates main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ zesty-backports main restricted
+deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-backports main restricted
+
+deb http://ports.ubuntu.com/ubuntu-ports/ zesty-security main restricted universe multiverse
+deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-security main restricted universe multiverse
diff --git a/eng/common/cross/armel/sources.list.jessie b/eng/common/cross/armel/sources.list.jessie
new file mode 100644
index 000000000..3d9c3059d
--- /dev/null
+++ b/eng/common/cross/armel/sources.list.jessie
@@ -0,0 +1,3 @@
+# Debian (jessie) # Stable
+deb http://ftp.debian.org/debian/ jessie main contrib non-free
+deb-src http://ftp.debian.org/debian/ jessie main contrib non-free
diff --git a/eng/common/cross/armel/tizen-build-rootfs.sh b/eng/common/cross/armel/tizen-build-rootfs.sh
new file mode 100644
index 000000000..87c48e78f
--- /dev/null
+++ b/eng/common/cross/armel/tizen-build-rootfs.sh
@@ -0,0 +1,44 @@
+#!/usr/bin/env bash
+set -e
+
+__ARM_SOFTFP_CrossDir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
+__TIZEN_CROSSDIR="$__ARM_SOFTFP_CrossDir/tizen"
+
+if [[ -z "$ROOTFS_DIR" ]]; then
+ echo "ROOTFS_DIR is not defined."
+ exit 1;
+fi
+
+# Clean-up (TODO-Cleanup: We may already delete $ROOTFS_DIR at ./cross/build-rootfs.sh.)
+# hk0110
+if [ -d "$ROOTFS_DIR" ]; then
+ umount $ROOTFS_DIR/*
+ rm -rf $ROOTFS_DIR
+fi
+
+TIZEN_TMP_DIR=$ROOTFS_DIR/tizen_tmp
+mkdir -p $TIZEN_TMP_DIR
+
+# Download files
+echo ">>Start downloading files"
+VERBOSE=1 $__ARM_SOFTFP_CrossDir/tizen-fetch.sh $TIZEN_TMP_DIR
+echo "<<Finish downloading files"
+
+echo ">>Start constructing Tizen rootfs"
+TIZEN_RPM_FILES=`ls $TIZEN_TMP_DIR/*.rpm`
+cd $ROOTFS_DIR
+for f in $TIZEN_RPM_FILES; do
+ rpm2cpio $f | cpio -idm --quiet
+done
+echo "<<Finish constructing Tizen rootfs"
+
+echo ">>Start configuring Tizen rootfs"
+rm ./usr/lib/libunwind.so
+ln -s libunwind.so.8 ./usr/lib/libunwind.so
+ln -sfn asm-arm ./usr/include/asm
+patch -p1 < $__TIZEN_CROSSDIR/tizen.patch
+echo "<<Finish configuring Tizen rootfs"
diff --git a/eng/common/cross/armel/tizen-fetch.sh b/eng/common/cross/armel/tizen-fetch.sh
new file mode 100644
--- /dev/null
+++ b/eng/common/cross/armel/tizen-fetch.sh
@@ -0,0 +1,171 @@
+#!/usr/bin/env bash
+set -e
+
+if [ -z "$VERBOSE" ] || ! [ "$VERBOSE" -eq "$VERBOSE" ] 2>/dev/null; then
+ VERBOSE=0
+fi
+
+Log()
+{
+ if [ $VERBOSE -ge $1 ]; then
+ echo ${@:2}
+ fi
+}
+
+Inform()
+{
+ Log 1 -e "\x1B[0;34m$@\x1B[m"
+}
+
+Debug()
+{
+ Log 2 -e "\x1B[0;32m$@\x1B[m"
+}
+
+Error()
+{
+ >&2 Log 0 -e "\x1B[0;31m$@\x1B[m"
+}
+
+Fetch()
+{
+ URL=$1
+ FILE=$2
+ PROGRESS=$3
+ if [ $VERBOSE -ge 1 ] && [ $PROGRESS ]; then
+ CURL_OPT="--progress-bar"
+ else
+ CURL_OPT="--silent"
+ fi
+ curl $CURL_OPT $URL > $FILE
+}
+
+hash curl 2> /dev/null || { Error "Require 'curl' Aborting."; exit 1; }
+hash xmllint 2> /dev/null || { Error "Require 'xmllint' Aborting."; exit 1; }
+hash sha256sum 2> /dev/null || { Error "Require 'sha256sum' Aborting."; exit 1; }
+
+TMPDIR=$1
+if [ ! -d $TMPDIR ]; then
+ TMPDIR=./tizen_tmp
+ Debug "Create temporary directory : $TMPDIR"
+ mkdir -p $TMPDIR
+fi
+
+TIZEN_URL=http://download.tizen.org/releases/milestone/tizen
+BUILD_XML=build.xml
+REPOMD_XML=repomd.xml
+PRIMARY_XML=primary.xml
+TARGET_URL="http://__not_initialized"
+
+Xpath_get()
+{
+ XPATH_RESULT=''
+ XPATH=$1
+ XML_FILE=$2
+ RESULT=$(xmllint --xpath $XPATH $XML_FILE)
+ if [[ -z ${RESULT// } ]]; then
+ Error "Can not find target from $XML_FILE"
+ Debug "Xpath = $XPATH"
+ exit 1
+ fi
+ XPATH_RESULT=$RESULT
+}
+
+fetch_tizen_pkgs_init()
+{
+ TARGET=$1
+ PROFILE=$2
+ Debug "Initialize TARGET=$TARGET, PROFILE=$PROFILE"
+
+ TMP_PKG_DIR=$TMPDIR/tizen_${PROFILE}_pkgs
+ if [ -d $TMP_PKG_DIR ]; then rm -rf $TMP_PKG_DIR; fi
+ mkdir -p $TMP_PKG_DIR
+
+ PKG_URL=$TIZEN_URL/$PROFILE/latest
+
+ BUILD_XML_URL=$PKG_URL/$BUILD_XML
+ TMP_BUILD=$TMP_PKG_DIR/$BUILD_XML
+ TMP_REPOMD=$TMP_PKG_DIR/$REPOMD_XML
+ TMP_PRIMARY=$TMP_PKG_DIR/$PRIMARY_XML
+ TMP_PRIMARYGZ=${TMP_PRIMARY}.gz
+
+ Fetch $BUILD_XML_URL $TMP_BUILD
+
+ Debug "fetch $BUILD_XML_URL to $TMP_BUILD"
+
+ TARGET_XPATH="//build/buildtargets/buildtarget[@name=\"$TARGET\"]/repo[@type=\"binary\"]/text()"
+ Xpath_get $TARGET_XPATH $TMP_BUILD
+ TARGET_PATH=$XPATH_RESULT
+ TARGET_URL=$PKG_URL/$TARGET_PATH
+
+ REPOMD_URL=$TARGET_URL/repodata/repomd.xml
+ PRIMARY_XPATH='string(//*[local-name()="data"][@type="primary"]/*[local-name()="location"]/@href)'
+
+ Fetch $REPOMD_URL $TMP_REPOMD
+
+ Debug "fetch $REPOMD_URL to $TMP_REPOMD"
+
+ Xpath_get $PRIMARY_XPATH $TMP_REPOMD
+ PRIMARY_XML_PATH=$XPATH_RESULT
+ PRIMARY_URL=$TARGET_URL/$PRIMARY_XML_PATH
+
+ Fetch $PRIMARY_URL $TMP_PRIMARYGZ
+
+ Debug "fetch $PRIMARY_URL to $TMP_PRIMARYGZ"
+
+ gunzip $TMP_PRIMARYGZ
+
+ Debug "unzip $TMP_PRIMARYGZ to $TMP_PRIMARY"
+}
+
+fetch_tizen_pkgs()
+{
+ ARCH=$1
+ PACKAGE_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="location"]/@href)'
+
+ PACKAGE_CHECKSUM_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="checksum"]/text())'
+
+ for pkg in ${@:2}
+ do
+ Inform "Fetching... $pkg"
+ XPATH=${PACKAGE_XPATH_TPL/_PKG_/$pkg}
+ XPATH=${XPATH/_ARCH_/$ARCH}
+ Xpath_get $XPATH $TMP_PRIMARY
+ PKG_PATH=$XPATH_RESULT
+
+ XPATH=${PACKAGE_CHECKSUM_XPATH_TPL/_PKG_/$pkg}
+ XPATH=${XPATH/_ARCH_/$ARCH}
+ Xpath_get $XPATH $TMP_PRIMARY
+ CHECKSUM=$XPATH_RESULT
+
+ PKG_URL=$TARGET_URL/$PKG_PATH
+ PKG_FILE=$(basename $PKG_PATH)
+ PKG_PATH=$TMPDIR/$PKG_FILE
+
+ Debug "Download $PKG_URL to $PKG_PATH"
+ Fetch $PKG_URL $PKG_PATH true
+
+ echo "$CHECKSUM $PKG_PATH" | sha256sum -c - > /dev/null
+ if [ $? -ne 0 ]; then
+ Error "Fail to fetch $PKG_URL to $PKG_PATH"
+ Debug "Checksum = $CHECKSUM"
+ exit 1
+ fi
+ done
+}
+
+Inform "Initialize arm base"
+fetch_tizen_pkgs_init standard base
+Inform "fetch common packages"
+fetch_tizen_pkgs armv7l gcc glibc glibc-devel libicu libicu-devel libatomic
+fetch_tizen_pkgs noarch linux-glibc-devel
+Inform "fetch coreclr packages"
+fetch_tizen_pkgs armv7l lldb lldb-devel libgcc libstdc++ libstdc++-devel libunwind libunwind-devel lttng-ust-devel lttng-ust userspace-rcu-devel userspace-rcu
+Inform "fetch corefx packages"
+fetch_tizen_pkgs armv7l libcom_err libcom_err-devel zlib zlib-devel libopenssl libopenssl-devel krb5 krb5-devel libcurl libcurl-devel
+
+Inform "Initialize standard unified"
+fetch_tizen_pkgs_init standard unified
+Inform "fetch corefx packages"
+fetch_tizen_pkgs armv7l gssdp gssdp-devel tizen-release
+
diff --git a/eng/common/cross/armel/tizen/tizen-dotnet.ks b/eng/common/cross/armel/tizen/tizen-dotnet.ks
new file mode 100644
index 000000000..506d455bd
--- /dev/null
+++ b/eng/common/cross/armel/tizen/tizen-dotnet.ks
@@ -0,0 +1,50 @@
+lang en_US.UTF-8
+keyboard us
+timezone --utc Asia/Seoul
+
+part / --fstype="ext4" --size=3500 --ondisk=mmcblk0 --label rootfs --fsoptions=defaults,noatime
+
+rootpw tizen
+desktop --autologinuser=root
+user --name root --groups audio,video --password 'tizen'
+
+repo --name=standard --baseurl=http://download.tizen.org/releases/milestone/tizen/unified/latest/repos/standard/packages/ --ssl_verify=no
+repo --name=base --baseurl=http://download.tizen.org/releases/milestone/tizen/base/latest/repos/standard/packages/ --ssl_verify=no
+
+%packages
+tar
+gzip
+
+sed
+grep
+gawk
+perl
+
+binutils
+findutils
+util-linux
+lttng-ust
+userspace-rcu
+procps-ng
+tzdata
+ca-certificates
+
+
+### Core FX
+libicu
+libunwind
+iputils
+zlib
+krb5
+libcurl
+libopenssl
+
+%end
+
+%post
+
+### Update /tmp privilege
+chmod 777 /tmp
+####################################
+
+%end
diff --git a/eng/common/cross/armel/tizen/tizen.patch b/eng/common/cross/armel/tizen/tizen.patch
new file mode 100644
index 000000000..d223427c9
--- /dev/null
+++ b/eng/common/cross/armel/tizen/tizen.patch
@@ -0,0 +1,18 @@
+diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so
+--- a/usr/lib/libc.so 2016-12-30 23:00:08.284951863 +0900
++++ b/usr/lib/libc.so 2016-12-30 23:00:32.140951815 +0900
+@@ -2,4 +2,4 @@
+ Use the shared library, but some functions are only in
+ the static library, so try that secondarily. */
+ OUTPUT_FORMAT(elf32-littlearm)
+-GROUP ( /lib/libc.so.6 /usr/lib/libc_nonshared.a AS_NEEDED ( /lib/ld-linux.so.3 ) )
++GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux.so.3 ) )
+diff -u -r a/usr/lib/libpthread.so b/usr/lib/libpthread.so
+--- a/usr/lib/libpthread.so 2016-12-30 23:00:19.408951841 +0900
++++ b/usr/lib/libpthread.so 2016-12-30 23:00:39.068951801 +0900
+@@ -2,4 +2,4 @@
+ Use the shared library, but some functions are only in
+ the static library, so try that secondarily. */
+ OUTPUT_FORMAT(elf32-littlearm)
+-GROUP ( /lib/libpthread.so.0 /usr/lib/libpthread_nonshared.a )
++GROUP ( libpthread.so.0 libpthread_nonshared.a )
diff --git a/eng/common/cross/build-android-rootfs.sh b/eng/common/cross/build-android-rootfs.sh
new file mode 100644
index 000000000..adceda877
--- /dev/null
+++ b/eng/common/cross/build-android-rootfs.sh
@@ -0,0 +1,137 @@
+#!/usr/bin/env bash
+set -e
+__NDK_Version=r14
+
+usage()
+{
+ echo "Creates a toolchain and sysroot used for cross-compiling for Android."
+ echo
+ echo "Usage: $0 [BuildArch] [ApiLevel]"
+ echo
+ echo "BuildArch is the target architecture of Android. Currently only arm64 is supported."
+ echo "ApiLevel is the target Android API level. API levels usually match to Android releases. See https://source.android.com/source/build-numbers.html"
+ echo
+ echo "By default, the toolchain and sysroot will be generated in cross/android-rootfs/toolchain/[BuildArch]. You can change this behavior"
+ echo "by setting the TOOLCHAIN_DIR environment variable"
+ echo
+ echo "By default, the NDK will be downloaded into the cross/android-rootfs/android-ndk-$__NDK_Version directory. If you already have an NDK installation,"
+ echo "you can set the NDK_DIR environment variable to have this script use that installation of the NDK."
+ echo "By default, this script will generate a file, android_platform, in the root of the ROOTFS_DIR directory that contains the RID for the supported and tested Android build: android.21-arm64. This file is to replace '/etc/os-release', which is not available for Android."
+ exit 1
+}
+
+__ApiLevel=21 # The minimum platform for arm64 is API level 21
+__BuildArch=arm64
+__AndroidArch=aarch64
+__AndroidToolchain=aarch64-linux-android
+
+for i in "$@"
+ do
+ lowerI="$(echo $i | awk '{print tolower($0)}')"
+ case $lowerI in
+ -?|-h|--help)
+ usage
+ exit 1
+ ;;
+ arm64)
+ __BuildArch=arm64
+ __AndroidArch=aarch64
+ __AndroidToolchain=aarch64-linux-android
+ ;;
+ arm)
+ __BuildArch=arm
+ __AndroidArch=arm
+ __AndroidToolchain=arm-linux-androideabi
+ ;;
+ *[0-9])
+ __ApiLevel=$i
+ ;;
+ *)
+ __UnprocessedBuildArgs="$__UnprocessedBuildArgs $i"
+ ;;
+ esac
+done
+
+# Obtain the location of the bash script to figure out where the root of the repo is.
+__CrossDir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+
+__Android_Cross_Dir="$__CrossDir/android-rootfs"
+__NDK_Dir="$__Android_Cross_Dir/android-ndk-$__NDK_Version"
+__libunwind_Dir="$__Android_Cross_Dir/libunwind"
+__lldb_Dir="$__Android_Cross_Dir/lldb"
+__ToolchainDir="$__Android_Cross_Dir/toolchain/$__BuildArch"
+
+if [[ -n "$TOOLCHAIN_DIR" ]]; then
+ __ToolchainDir=$TOOLCHAIN_DIR
+fi
+
+if [[ -n "$NDK_DIR" ]]; then
+ __NDK_Dir=$NDK_DIR
+fi
+
+echo "Target API level: $__ApiLevel"
+echo "Target architecture: $__BuildArch"
+echo "NDK location: $__NDK_Dir"
+echo "Target Toolchain location: $__ToolchainDir"
+
+# Download the NDK if required
+if [ ! -d $__NDK_Dir ]; then
+ echo Downloading the NDK into $__NDK_Dir
+ mkdir -p $__NDK_Dir
+ wget -nv -nc --show-progress https://dl.google.com/android/repository/android-ndk-$__NDK_Version-linux-x86_64.zip -O $__Android_Cross_Dir/android-ndk-$__NDK_Version-linux-x86_64.zip
+ unzip -q $__Android_Cross_Dir/android-ndk-$__NDK_Version-linux-x86_64.zip -d $__Android_Cross_Dir
+fi
+
+if [ ! -d $__lldb_Dir ]; then
+ mkdir -p $__lldb_Dir
+ echo Downloading LLDB into $__lldb_Dir
+ wget -nv -nc --show-progress https://dl.google.com/android/repository/lldb-2.3.3614996-linux-x86_64.zip -O $__Android_Cross_Dir/lldb-2.3.3614996-linux-x86_64.zip
+ unzip -q $__Android_Cross_Dir/lldb-2.3.3614996-linux-x86_64.zip -d $__lldb_Dir
+fi
+
+# Create the RootFS for both arm64 as well as aarch
+rm -rf $__Android_Cross_Dir/toolchain
+
+echo Generating the $__BuildArch toolchain
+$__NDK_Dir/build/tools/make_standalone_toolchain.py --arch $__BuildArch --api $__ApiLevel --install-dir $__ToolchainDir
+
+# Install the required packages into the toolchain
+# TODO: Add logic to get latest pkg version instead of specific version number
+rm -rf $__Android_Cross_Dir/deb/
+rm -rf $__Android_Cross_Dir/tmp
+
+mkdir -p $__Android_Cross_Dir/deb/
+mkdir -p $__Android_Cross_Dir/tmp/$arch/
+wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/libicu_60.2_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/libicu_60.2_$__AndroidArch.deb
+wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/libicu-dev_60.2_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/libicu-dev_60.2_$__AndroidArch.deb
+
+wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/libandroid-glob-dev_0.4_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/libandroid-glob-dev_0.4_$__AndroidArch.deb
+wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/libandroid-glob_0.4_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/libandroid-glob_0.4_$__AndroidArch.deb
+wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/libandroid-support-dev_22_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/libandroid-support-dev_22_$__AndroidArch.deb
+wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/libandroid-support_22_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/libandroid-support_22_$__AndroidArch.deb
+wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/liblzma-dev_5.2.3_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/liblzma-dev_5.2.3_$__AndroidArch.deb
+wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/liblzma_5.2.3_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/liblzma_5.2.3_$__AndroidArch.deb
+wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/libunwind-dev_1.2.20170304_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/libunwind-dev_1.2.20170304_$__AndroidArch.deb
+wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/libunwind_1.2.20170304_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/libunwind_1.2.20170304_$__AndroidArch.deb
+
+echo Unpacking Termux packages
+dpkg -x $__Android_Cross_Dir/deb/libicu_60.2_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/
+dpkg -x $__Android_Cross_Dir/deb/libicu-dev_60.2_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/
+dpkg -x $__Android_Cross_Dir/deb/libandroid-glob-dev_0.4_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/
+dpkg -x $__Android_Cross_Dir/deb/libandroid-glob_0.4_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/
+dpkg -x $__Android_Cross_Dir/deb/libandroid-support-dev_22_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/
+dpkg -x $__Android_Cross_Dir/deb/libandroid-support_22_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/
+dpkg -x $__Android_Cross_Dir/deb/liblzma-dev_5.2.3_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/
+dpkg -x $__Android_Cross_Dir/deb/liblzma_5.2.3_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/
+dpkg -x $__Android_Cross_Dir/deb/libunwind-dev_1.2.20170304_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/
+dpkg -x $__Android_Cross_Dir/deb/libunwind_1.2.20170304_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/
+
+cp -R $__Android_Cross_Dir/tmp/$__AndroidArch/data/data/com.termux/files/usr/* $__ToolchainDir/sysroot/usr/
+
+# Generate platform file for build.sh script to assign to __DistroRid
+echo "Generating platform file..."
+
+echo "RID=android.21-arm64" > $__ToolchainDir/sysroot/android_platform
+echo Now run:
+echo CONFIG_DIR=\`realpath cross/android/$__BuildArch\` ROOTFS_DIR=\`realpath $__ToolchainDir/sysroot\` ./build.sh cross $__BuildArch skipgenerateversion skipnuget cmakeargs -DENABLE_LLDBPLUGIN=0
+
diff --git a/eng/common/cross/build-rootfs.sh b/eng/common/cross/build-rootfs.sh
new file mode 100644
index 000000000..d7d5d7d5f
--- /dev/null
+++ b/eng/common/cross/build-rootfs.sh
@@ -0,0 +1,234 @@
+#!/usr/bin/env bash
+
+usage()
+{
+ echo "Usage: $0 [BuildArch] [LinuxCodeName] [lldbx.y] [--skipunmount] --rootfsdir <directory>]"
+ echo "BuildArch can be: arm(default), armel, arm64, x86"
+ echo "LinuxCodeName - optional, Code name for Linux, can be: trusty, xenial(default), zesty, bionic, alpine. If BuildArch is armel, LinuxCodeName is jessie(default) or tizen."
+ echo "lldbx.y - optional, LLDB version, can be: lldb3.9(default), lldb4.0, lldb5.0, lldb6.0 no-lldb. Ignored for alpine"
+ echo "--skipunmount - optional, will skip the unmount of rootfs folder."
+ exit 1
+}
+
+__LinuxCodeName=xenial
+__CrossDir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
+__InitialDir=$PWD
+__BuildArch=arm
+__UbuntuArch=armhf
+__UbuntuRepo="http://ports.ubuntu.com/"
+__LLDB_Package="liblldb-3.9-dev"
+__SkipUnmount=0
+
+# base development support
+__UbuntuPackages="build-essential"
+
+__AlpinePackages="alpine-base"
+__AlpinePackages+=" build-base"
+__AlpinePackages+=" linux-headers"
+__AlpinePackages+=" lldb-dev"
+__AlpinePackages+=" llvm-dev"
+
+# symlinks fixer
+__UbuntuPackages+=" symlinks"
+
+# CoreCLR and CoreFX dependencies
+__UbuntuPackages+=" libicu-dev"
+__UbuntuPackages+=" liblttng-ust-dev"
+__UbuntuPackages+=" libunwind8-dev"
+
+__AlpinePackages+=" gettext-dev"
+__AlpinePackages+=" icu-dev"
+__AlpinePackages+=" libunwind-dev"
+__AlpinePackages+=" lttng-ust-dev"
+
+# CoreFX dependencies
+__UbuntuPackages+=" libcurl4-openssl-dev"
+__UbuntuPackages+=" libkrb5-dev"
+__UbuntuPackages+=" libssl-dev"
+__UbuntuPackages+=" zlib1g-dev"
+
+__AlpinePackages+=" curl-dev"
+__AlpinePackages+=" krb5-dev"
+__AlpinePackages+=" openssl-dev"
+__AlpinePackages+=" zlib-dev"
+
+__UnprocessedBuildArgs=
+while :; do
+ if [ $# -le 0 ]; then
+ break
+ fi
+
+ lowerI="$(echo $1 | awk '{print tolower($0)}')"
+ case $lowerI in
+ -?|-h|--help)
+ usage
+ exit 1
+ ;;
+ arm)
+ __BuildArch=arm
+ __UbuntuArch=armhf
+ __AlpineArch=armhf
+ __QEMUArch=arm
+ ;;
+ arm64)
+ __BuildArch=arm64
+ __UbuntuArch=arm64
+ __AlpineArch=aarch64
+ __QEMUArch=aarch64
+ ;;
+ armel)
+ __BuildArch=armel
+ __UbuntuArch=armel
+ __UbuntuRepo="http://ftp.debian.org/debian/"
+ __LinuxCodeName=jessie
+ ;;
+ x86)
+ __BuildArch=x86
+ __UbuntuArch=i386
+ __UbuntuRepo="http://archive.ubuntu.com/ubuntu/"
+ ;;
+ lldb3.6)
+ __LLDB_Package="lldb-3.6-dev"
+ ;;
+ lldb3.8)
+ __LLDB_Package="lldb-3.8-dev"
+ ;;
+ lldb3.9)
+ __LLDB_Package="liblldb-3.9-dev"
+ ;;
+ lldb4.0)
+ __LLDB_Package="liblldb-4.0-dev"
+ ;;
+ lldb5.0)
+ __LLDB_Package="liblldb-5.0-dev"
+ ;;
+ lldb6.0)
+ __LLDB_Package="liblldb-6.0-dev"
+ ;;
+ no-lldb)
+ unset __LLDB_Package
+ ;;
+ trusty) # Ubuntu 14.04
+ if [ "$__LinuxCodeName" != "jessie" ]; then
+ __LinuxCodeName=trusty
+ fi
+ ;;
+ xenial) # Ubuntu 16.04
+ if [ "$__LinuxCodeName" != "jessie" ]; then
+ __LinuxCodeName=xenial
+ fi
+ ;;
+ zesty) # Ubuntu 17.04
+ if [ "$__LinuxCodeName" != "jessie" ]; then
+ __LinuxCodeName=zesty
+ fi
+ ;;
+ bionic) # Ubuntu 18.04
+ if [ "$__LinuxCodeName" != "jessie" ]; then
+ __LinuxCodeName=bionic
+ fi
+ ;;
+ jessie) # Debian 8
+ __LinuxCodeName=jessie
+ __UbuntuRepo="http://ftp.debian.org/debian/"
+ ;;
+ stretch) # Debian 9
+ __LinuxCodeName=stretch
+ __UbuntuRepo="http://ftp.debian.org/debian/"
+ __LLDB_Package="liblldb-6.0-dev"
+ ;;
+ buster) # Debian 10
+ __LinuxCodeName=buster
+ __UbuntuRepo="http://ftp.debian.org/debian/"
+ __LLDB_Package="liblldb-6.0-dev"
+ ;;
+ tizen)
+ if [ "$__BuildArch" != "armel" ]; then
+ echo "Tizen is available only for armel."
+ usage;
+ exit 1;
+ fi
+ __LinuxCodeName=
+ __UbuntuRepo=
+ __Tizen=tizen
+ ;;
+ alpine)
+ __LinuxCodeName=alpine
+ __UbuntuRepo=
+ ;;
+ --skipunmount)
+ __SkipUnmount=1
+ ;;
+ --rootfsdir|-rootfsdir)
+ shift
+ __RootfsDir=$1
+ ;;
+ *)
+ __UnprocessedBuildArgs="$__UnprocessedBuildArgs $1"
+ ;;
+ esac
+
+ shift
+done
+
+if [ "$__BuildArch" == "armel" ]; then
+ __LLDB_Package="lldb-3.5-dev"
+fi
+__UbuntuPackages+=" ${__LLDB_Package:-}"
+
+if [ -z "$__RootfsDir" ] && [ ! -z "$ROOTFS_DIR" ]; then
+ __RootfsDir=$ROOTFS_DIR
+fi
+
+if [ -z "$__RootfsDir" ]; then
+ __RootfsDir="$__CrossDir/../../../.tools/rootfs/$__BuildArch"
+fi
+
+if [ -d "$__RootfsDir" ]; then
+ if [ $__SkipUnmount == 0 ]; then
+ umount $__RootfsDir/*
+ fi
+ rm -rf $__RootfsDir
+fi
+
+if [[ "$__LinuxCodeName" == "alpine" ]]; then
+ __ApkToolsVersion=2.9.1
+ __AlpineVersion=3.7
+ __ApkToolsDir=$(mktemp -d)
+ wget https://github.com/alpinelinux/apk-tools/releases/download/v$__ApkToolsVersion/apk-tools-$__ApkToolsVersion-x86_64-linux.tar.gz -P $__ApkToolsDir
+ tar -xf $__ApkToolsDir/apk-tools-$__ApkToolsVersion-x86_64-linux.tar.gz -C $__ApkToolsDir
+ mkdir -p $__RootfsDir/usr/bin
+ cp -v /usr/bin/qemu-$__QEMUArch-static $__RootfsDir/usr/bin
+ $__ApkToolsDir/apk-tools-$__ApkToolsVersion/apk \
+ -X http://dl-cdn.alpinelinux.org/alpine/v$__AlpineVersion/main \
+ -X http://dl-cdn.alpinelinux.org/alpine/v$__AlpineVersion/community \
+ -X http://dl-cdn.alpinelinux.org/alpine/edge/testing \
+ -X http://dl-cdn.alpinelinux.org/alpine/edge/main \
+ -U --allow-untrusted --root $__RootfsDir --arch $__AlpineArch --initdb \
+ add $__AlpinePackages
+ rm -r $__ApkToolsDir
+elif [[ -n $__LinuxCodeName ]]; then
+ qemu-debootstrap --arch $__UbuntuArch $__LinuxCodeName $__RootfsDir $__UbuntuRepo
+ cp $__CrossDir/$__BuildArch/sources.list.$__LinuxCodeName $__RootfsDir/etc/apt/sources.list
+ chroot $__RootfsDir apt-get update
+ chroot $__RootfsDir apt-get -f -y install
+ chroot $__RootfsDir apt-get -y install $__UbuntuPackages
+ chroot $__RootfsDir symlinks -cr /usr
+
+ if [ $__SkipUnmount == 0 ]; then
+ umount $__RootfsDir/*
+ fi
+
+ if [[ "$__BuildArch" == "arm" && "$__LinuxCodeName" == "trusty" ]]; then
+ pushd $__RootfsDir
+ patch -p1 < $__CrossDir/$__BuildArch/trusty.patch
+ patch -p1 < $__CrossDir/$__BuildArch/trusty-lttng-2.4.patch
+ popd
+ fi
+elif [ "$__Tizen" == "tizen" ]; then
+ ROOTFS_DIR=$__RootfsDir $__CrossDir/$__BuildArch/tizen-build-rootfs.sh
+else
+ echo "Unsupported target platform."
+ usage;
+ exit 1
+fi
diff --git a/eng/common/cross/toolchain.cmake b/eng/common/cross/toolchain.cmake
new file mode 100644
index 000000000..071d41124
--- /dev/null
+++ b/eng/common/cross/toolchain.cmake
@@ -0,0 +1,138 @@
+set(CROSS_ROOTFS $ENV{ROOTFS_DIR})
+
+set(TARGET_ARCH_NAME $ENV{TARGET_BUILD_ARCH})
+set(CMAKE_SYSTEM_NAME Linux)
+set(CMAKE_SYSTEM_VERSION 1)
+
+if(TARGET_ARCH_NAME STREQUAL "armel")
+ set(CMAKE_SYSTEM_PROCESSOR armv7l)
+ set(TOOLCHAIN "arm-linux-gnueabi")
+ if("$ENV{__DistroRid}" MATCHES "tizen.*")
+ set(TIZEN_TOOLCHAIN "armv7l-tizen-linux-gnueabi/6.2.1")
+ endif()
+elseif(TARGET_ARCH_NAME STREQUAL "arm")
+ set(CMAKE_SYSTEM_PROCESSOR armv7l)
+ if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/armv6-alpine-linux-musleabihf)
+ set(TOOLCHAIN "armv6-alpine-linux-musleabihf")
+ else()
+ set(TOOLCHAIN "arm-linux-gnueabihf")
+ endif()
+elseif(TARGET_ARCH_NAME STREQUAL "arm64")
+ set(CMAKE_SYSTEM_PROCESSOR aarch64)
+ if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/aarch64-alpine-linux-musl)
+ set(TOOLCHAIN "aarch64-alpine-linux-musl")
+ else()
+ set(TOOLCHAIN "aarch64-linux-gnu")
+ endif()
+elseif(TARGET_ARCH_NAME STREQUAL "x86")
+ set(CMAKE_SYSTEM_PROCESSOR i686)
+ set(TOOLCHAIN "i686-linux-gnu")
+else()
+ message(FATAL_ERROR "Arch is ${TARGET_ARCH_NAME}. Only armel, arm, arm64 and x86 are supported!")
+endif()
+
+# Specify include paths
+if(TARGET_ARCH_NAME STREQUAL "armel")
+ if(DEFINED TIZEN_TOOLCHAIN)
+ include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/)
+ include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/armv7l-tizen-linux-gnueabi)
+ endif()
+endif()
+
+# add_compile_param - adds only new options without duplicates.
+# arg0 - list with result options, arg1 - list with new options.
+# arg2 - optional argument, quick summary string for optional using CACHE FORCE mode.
+macro(add_compile_param)
+ if(NOT ${ARGC} MATCHES "^(2|3)$")
+ message(FATAL_ERROR "Wrong using add_compile_param! Two or three parameters must be given! See add_compile_param description.")
+ endif()
+ foreach(OPTION ${ARGV1})
+ if(NOT ${ARGV0} MATCHES "${OPTION}($| )")
+ set(${ARGV0} "${${ARGV0}} ${OPTION}")
+ if(${ARGC} EQUAL "3") # CACHE FORCE mode
+ set(${ARGV0} "${${ARGV0}}" CACHE STRING "${ARGV2}" FORCE)
+ endif()
+ endif()
+ endforeach()
+endmacro()
+
+# Specify link flags
+add_compile_param(CROSS_LINK_FLAGS "--sysroot=${CROSS_ROOTFS}")
+add_compile_param(CROSS_LINK_FLAGS "--gcc-toolchain=${CROSS_ROOTFS}/usr")
+add_compile_param(CROSS_LINK_FLAGS "--target=${TOOLCHAIN}")
+add_compile_param(CROSS_LINK_FLAGS "-fuse-ld=gold")
+
+if(TARGET_ARCH_NAME STREQUAL "armel")
+ if(DEFINED TIZEN_TOOLCHAIN) # For Tizen only
+ add_compile_param(CROSS_LINK_FLAGS "-B${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}")
+ add_compile_param(CROSS_LINK_FLAGS "-L${CROSS_ROOTFS}/lib")
+ add_compile_param(CROSS_LINK_FLAGS "-L${CROSS_ROOTFS}/usr/lib")
+ add_compile_param(CROSS_LINK_FLAGS "-L${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}")
+ endif()
+elseif(TARGET_ARCH_NAME STREQUAL "x86")
+ add_compile_param(CROSS_LINK_FLAGS "-m32")
+endif()
+
+add_compile_param(CMAKE_EXE_LINKER_FLAGS "${CROSS_LINK_FLAGS}" "TOOLCHAIN_EXE_LINKER_FLAGS")
+add_compile_param(CMAKE_SHARED_LINKER_FLAGS "${CROSS_LINK_FLAGS}" "TOOLCHAIN_EXE_LINKER_FLAGS")
+add_compile_param(CMAKE_MODULE_LINKER_FLAGS "${CROSS_LINK_FLAGS}" "TOOLCHAIN_EXE_LINKER_FLAGS")
+
+# Specify compile options
+add_compile_options("--sysroot=${CROSS_ROOTFS}")
+add_compile_options("--target=${TOOLCHAIN}")
+add_compile_options("--gcc-toolchain=${CROSS_ROOTFS}/usr")
+
+if(TARGET_ARCH_NAME MATCHES "^(arm|armel|arm64)$")
+ set(CMAKE_C_COMPILER_TARGET ${TOOLCHAIN})
+ set(CMAKE_CXX_COMPILER_TARGET ${TOOLCHAIN})
+ set(CMAKE_ASM_COMPILER_TARGET ${TOOLCHAIN})
+endif()
+
+if(TARGET_ARCH_NAME MATCHES "^(arm|armel)$")
+ add_compile_options(-mthumb)
+ add_compile_options(-mfpu=vfpv3)
+ if(TARGET_ARCH_NAME STREQUAL "armel")
+ add_compile_options(-mfloat-abi=softfp)
+ if(DEFINED TIZEN_TOOLCHAIN)
+ add_compile_options(-Wno-deprecated-declarations) # compile-time option
+ add_compile_options(-D__extern_always_inline=inline) # compile-time option
+ endif()
+ endif()
+elseif(TARGET_ARCH_NAME STREQUAL "x86")
+ add_compile_options(-m32)
+ add_compile_options(-Wno-error=unused-command-line-argument)
+endif()
+
+# Set LLDB include and library paths
+if(TARGET_ARCH_NAME MATCHES "^(arm|armel|x86)$")
+ if(TARGET_ARCH_NAME STREQUAL "x86")
+ set(LLVM_CROSS_DIR "$ENV{LLVM_CROSS_HOME}")
+ else() # arm/armel case
+ set(LLVM_CROSS_DIR "$ENV{LLVM_ARM_HOME}")
+ endif()
+ if(LLVM_CROSS_DIR)
+ set(WITH_LLDB_LIBS "${LLVM_CROSS_DIR}/lib/" CACHE STRING "")
+ set(WITH_LLDB_INCLUDES "${LLVM_CROSS_DIR}/include" CACHE STRING "")
+ set(LLDB_H "${WITH_LLDB_INCLUDES}" CACHE STRING "")
+ set(LLDB "${LLVM_CROSS_DIR}/lib/liblldb.so" CACHE STRING "")
+ else()
+ if(TARGET_ARCH_NAME STREQUAL "x86")
+ set(WITH_LLDB_LIBS "${CROSS_ROOTFS}/usr/lib/i386-linux-gnu" CACHE STRING "")
+ set(CHECK_LLVM_DIR "${CROSS_ROOTFS}/usr/lib/llvm-3.8/include")
+ if(EXISTS "${CHECK_LLVM_DIR}" AND IS_DIRECTORY "${CHECK_LLVM_DIR}")
+ set(WITH_LLDB_INCLUDES "${CHECK_LLVM_DIR}")
+ else()
+ set(WITH_LLDB_INCLUDES "${CROSS_ROOTFS}/usr/lib/llvm-3.6/include")
+ endif()
+ else() # arm/armel case
+ set(WITH_LLDB_LIBS "${CROSS_ROOTFS}/usr/lib/${TOOLCHAIN}" CACHE STRING "")
+ set(WITH_LLDB_INCLUDES "${CROSS_ROOTFS}/usr/lib/llvm-3.6/include" CACHE STRING "")
+ endif()
+ endif()
+endif()
+
+set(CMAKE_FIND_ROOT_PATH "${CROSS_ROOTFS}")
+set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
+set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
+set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
+set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY)
diff --git a/eng/common/darc-init.ps1 b/eng/common/darc-init.ps1
new file mode 100644
index 000000000..46d175fdf
--- /dev/null
+++ b/eng/common/darc-init.ps1
@@ -0,0 +1,33 @@
+param (
+ $darcVersion = $null,
+ $versionEndpoint = "https://maestro-prod.westus2.cloudapp.azure.com/api/assets/darc-version?api-version=2019-01-16",
+ $verbosity = "m"
+)
+
+. $PSScriptRoot\tools.ps1
+
+function InstallDarcCli ($darcVersion) {
+ $darcCliPackageName = "microsoft.dotnet.darc"
+
+ $dotnetRoot = InitializeDotNetCli -install:$true
+ $dotnet = "$dotnetRoot\dotnet.exe"
+ $toolList = & "$dotnet" tool list -g
+
+ if ($toolList -like "*$darcCliPackageName*") {
+ & "$dotnet" tool uninstall $darcCliPackageName -g
+ }
+
+ # If the user didn't explicitly specify the darc version,
+ # query the Maestro API for the correct version of darc to install.
+ if (-not $darcVersion) {
+ $darcVersion = $(Invoke-WebRequest -Uri $versionEndpoint -UseBasicParsing).Content
+ }
+
+ $arcadeServicesSource = 'https://dotnetfeed.blob.core.windows.net/dotnet-core/index.json'
+
+ Write-Host "Installing Darc CLI version $darcVersion..."
+ Write-Host "You may need to restart your command window if this is the first dotnet tool you have installed."
+ & "$dotnet" tool install $darcCliPackageName --version $darcVersion --add-source "$arcadeServicesSource" -v $verbosity -g
+}
+
+InstallDarcCli $darcVersion
diff --git a/eng/common/darc-init.sh b/eng/common/darc-init.sh
new file mode 100644
index 000000000..242429bca
--- /dev/null
+++ b/eng/common/darc-init.sh
@@ -0,0 +1,68 @@
+#!/usr/bin/env bash
+
+source="${BASH_SOURCE[0]}"
+darcVersion=''
+versionEndpoint="https://maestro-prod.westus2.cloudapp.azure.com/api/assets/darc-version?api-version=2019-01-16"
+verbosity=m
+
+while [[ $# > 0 ]]; do
+ opt="$(echo "$1" | awk '{print tolower($0)}')"
+ case "$opt" in
+ --darcversion)
+ darcVersion=$2
+ shift
+ ;;
+ --versionendpoint)
+ versionEndpoint=$2
+ shift
+ ;;
+ --verbosity)
+ verbosity=$2
+ shift
+ ;;
+ *)
+ echo "Invalid argument: $1"
+ usage
+ exit 1
+ ;;
+ esac
+
+ shift
+done
+
+# resolve $source until the file is no longer a symlink
+while [[ -h "$source" ]]; do
+ scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+ source="$(readlink "$source")"
+ # if $source was a relative symlink, we need to resolve it relative to the path where the
+ # symlink file was located
+ [[ $source != /* ]] && source="$scriptroot/$source"
+done
+scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+
+. "$scriptroot/tools.sh"
+
+if [ -z "$darcVersion" ]; then
+ darcVersion=$(curl -X GET "$versionEndpoint" -H "accept: text/plain")
+fi
+
+function InstallDarcCli {
+ local darc_cli_package_name="microsoft.dotnet.darc"
+
+ InitializeDotNetCli
+ local dotnet_root=$_InitializeDotNetCli
+
+ local uninstall_command=`$dotnet_root/dotnet tool uninstall $darc_cli_package_name -g`
+ local tool_list=$($dotnet_root/dotnet tool list -g)
+ if [[ $tool_list = *$darc_cli_package_name* ]]; then
+ echo $($dotnet_root/dotnet tool uninstall $darc_cli_package_name -g)
+ fi
+
+ local arcadeServicesSource="https://dotnetfeed.blob.core.windows.net/dotnet-core/index.json"
+
+ echo "Installing Darc CLI version $darcVersion..."
+ echo "You may need to restart your command shell if this is the first dotnet tool you have installed."
+ echo $($dotnet_root/dotnet tool install $darc_cli_package_name --version $darcVersion --add-source "$arcadeServicesSource" -v $verbosity -g)
+}
+
+InstallDarcCli
diff --git a/eng/common/dotnet-install.cmd b/eng/common/dotnet-install.cmd
new file mode 100644
index 000000000..b1c2642e7
--- /dev/null
+++ b/eng/common/dotnet-install.cmd
@@ -0,0 +1,2 @@
+@echo off
+powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0dotnet-install.ps1""" %*"
\ No newline at end of file
diff --git a/eng/common/dotnet-install.ps1 b/eng/common/dotnet-install.ps1
new file mode 100644
index 000000000..0b629b830
--- /dev/null
+++ b/eng/common/dotnet-install.ps1
@@ -0,0 +1,27 @@
+[CmdletBinding(PositionalBinding=$false)]
+Param(
+ [string] $verbosity = "minimal",
+ [string] $architecture = "",
+ [string] $version = "Latest",
+ [string] $runtime = "dotnet"
+)
+
+. $PSScriptRoot\tools.ps1
+
+$dotnetRoot = Join-Path $RepoRoot ".dotnet"
+
+$installdir = $dotnetRoot
+try {
+ if ($architecture -and $architecture.Trim() -eq "x86") {
+ $installdir = Join-Path $installdir "x86"
+ }
+ InstallDotNet $installdir $version $architecture $runtime $true
+}
+catch {
+ Write-Host $_
+ Write-Host $_.Exception
+ Write-Host $_.ScriptStackTrace
+ ExitWithExitCode 1
+}
+
+ExitWithExitCode 0
diff --git a/eng/common/dotnet-install.sh b/eng/common/dotnet-install.sh
new file mode 100644
index 000000000..c3072c958
--- /dev/null
+++ b/eng/common/dotnet-install.sh
@@ -0,0 +1,49 @@
+#!/usr/bin/env bash
+
+source="${BASH_SOURCE[0]}"
+# resolve $source until the file is no longer a symlink
+while [[ -h "$source" ]]; do
+ scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+ source="$(readlink "$source")"
+ # if $source was a relative symlink, we need to resolve it relative to the path where the
+ # symlink file was located
+ [[ $source != /* ]] && source="$scriptroot/$source"
+done
+scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+
+version='Latest'
+architecture=''
+runtime='dotnet'
+while [[ $# > 0 ]]; do
+ opt="$(echo "$1" | awk '{print tolower($0)}')"
+ case "$opt" in
+ -version|-v)
+ shift
+ version="$1"
+ ;;
+ -architecture|-a)
+ shift
+ architecture="$1"
+ ;;
+ -runtime|-r)
+ shift
+ runtime="$1"
+ ;;
+ *)
+ echo "Invalid argument: $1"
+ usage
+ exit 1
+ ;;
+ esac
+ shift
+done
+
+. "$scriptroot/tools.sh"
+dotnetRoot="$repo_root/.dotnet"
+InstallDotNet $dotnetRoot $version "$architecture" $runtime true || {
+ exit_code=$?
+ echo "dotnet-install.sh failed (exit code '$exit_code')." >&2
+ ExitWithExitCode $exit_code
+}
+
+ExitWithExitCode 0
diff --git a/eng/common/enable-cross-org-publishing.ps1 b/eng/common/enable-cross-org-publishing.ps1
new file mode 100644
index 000000000..eccbf9f1b
--- /dev/null
+++ b/eng/common/enable-cross-org-publishing.ps1
@@ -0,0 +1,6 @@
+param(
+ [string] $token
+)
+
+Write-Host "##vso[task.setvariable variable=VSS_NUGET_ACCESSTOKEN]$token"
+Write-Host "##vso[task.setvariable variable=VSS_NUGET_URI_PREFIXES]https://dnceng.pkgs.visualstudio.com/;https://pkgs.dev.azure.com/dnceng/;https://devdiv.pkgs.visualstudio.com/;https://pkgs.dev.azure.com/devdiv/"
diff --git a/eng/common/generate-graph-files.ps1 b/eng/common/generate-graph-files.ps1
new file mode 100644
index 000000000..b056e4c1a
--- /dev/null
+++ b/eng/common/generate-graph-files.ps1
@@ -0,0 +1,87 @@
+Param(
+ [Parameter(Mandatory=$true)][string] $barToken, # Token generated at https://maestro-prod.westus2.cloudapp.azure.com/Account/Tokens
+ [Parameter(Mandatory=$true)][string] $gitHubPat, # GitHub personal access token from https://github.com/settings/tokens (no auth scopes needed)
+ [Parameter(Mandatory=$true)][string] $azdoPat, # Azure Dev Ops tokens from https://dev.azure.com/dnceng/_details/security/tokens (code read scope needed)
+ [Parameter(Mandatory=$true)][string] $outputFolder, # Where the graphviz.txt file will be created
+ [string] $darcVersion = '1.1.0-beta.19175.6', # darc's version
+ [string] $graphvizVersion = '2.38', # GraphViz version
+ [switch] $includeToolset # Whether the graph should include toolset dependencies or not. i.e. arcade, optimization. For more about
+ # toolset dependencies see https://github.com/dotnet/arcade/blob/master/Documentation/Darc.md#toolset-vs-product-dependencies
+)
+
+$ErrorActionPreference = "Stop"
+. $PSScriptRoot\tools.ps1
+
+Import-Module -Name (Join-Path $PSScriptRoot "native\CommonLibrary.psm1")
+
+function CheckExitCode ([string]$stage)
+{
+ $exitCode = $LASTEXITCODE
+ if ($exitCode -ne 0) {
+ Write-Host "Something failed in stage: '$stage'. Check for errors above. Exiting now..."
+ ExitWithExitCode $exitCode
+ }
+}
+
+try {
+ Push-Location $PSScriptRoot
+
+ Write-Host "Installing darc..."
+ . .\darc-init.ps1 -darcVersion $darcVersion
+ CheckExitCode "Running darc-init"
+
+ $engCommonBaseDir = Join-Path $PSScriptRoot "native\"
+ $graphvizInstallDir = CommonLibrary\Get-NativeInstallDirectory
+ $nativeToolBaseUri = "https://netcorenativeassets.blob.core.windows.net/resource-packages/external"
+ $installBin = Join-Path $graphvizInstallDir "bin"
+
+ Write-Host "Installing dot..."
+ .\native\install-tool.ps1 -ToolName graphviz -InstallPath $installBin -BaseUri $nativeToolBaseUri -CommonLibraryDirectory $engCommonBaseDir -Version $graphvizVersion -Verbose
+
+ $darcExe = "$env:USERPROFILE\.dotnet\tools"
+ $darcExe = Resolve-Path "$darcExe\darc.exe"
+
+ Create-Directory $outputFolder
+
+ # Generate 3 graph descriptions:
+ # 1. Flat with coherency information
+ # 2. Graphviz (dot) file
+ # 3. Standard dependency graph
+ $graphVizFilePath = "$outputFolder\graphviz.txt"
+ $graphVizImageFilePath = "$outputFolder\graph.png"
+ $normalGraphFilePath = "$outputFolder\graph-full.txt"
+ $flatGraphFilePath = "$outputFolder\graph-flat.txt"
+ $baseOptions = @( "--github-pat", "$gitHubPat", "--azdev-pat", "$azdoPat", "--password", "$barToken" )
+
+ if ($includeToolset) {
+ Write-Host "Toolsets will be included in the graph..."
+ $baseOptions += @( "--include-toolset" )
+ }
+
+ Write-Host "Generating standard dependency graph..."
+ & "$darcExe" get-dependency-graph @baseOptions --output-file $normalGraphFilePath
+ CheckExitCode "Generating normal dependency graph"
+
+ Write-Host "Generating flat dependency graph and graphviz file..."
+ & "$darcExe" get-dependency-graph @baseOptions --flat --coherency --graphviz $graphVizFilePath --output-file $flatGraphFilePath
+ CheckExitCode "Generating flat and graphviz dependency graph"
+
+ Write-Host "Generating graph image $graphVizFilePath"
+ $dotFilePath = Join-Path $installBin "graphviz\$graphvizVersion\release\bin\dot.exe"
+ & "$dotFilePath" -Tpng -o"$graphVizImageFilePath" "$graphVizFilePath"
+ CheckExitCode "Generating graphviz image"
+
+ Write-Host "'$graphVizFilePath', '$flatGraphFilePath', '$normalGraphFilePath' and '$graphVizImageFilePath' created!"
+}
+catch {
+ if (!$includeToolset) {
+ Write-Host "This might be a toolset repo which includes only toolset dependencies. " -NoNewline -ForegroundColor Yellow
+ Write-Host "Since -includeToolset is not set there is no graph to create. Include -includeToolset and try again..." -ForegroundColor Yellow
+ }
+ Write-Host $_
+ Write-Host $_.Exception
+ Write-Host $_.ScriptStackTrace
+ ExitWithExitCode 1
+} finally {
+ Pop-Location
+}
\ No newline at end of file
diff --git a/eng/common/helixpublish.proj b/eng/common/helixpublish.proj
new file mode 100644
index 000000000..d7f185856
--- /dev/null
+++ b/eng/common/helixpublish.proj
@@ -0,0 +1,26 @@
+<Project Sdk="Microsoft.DotNet.Helix.Sdk" DefaultTargets="Test">
+
+  <PropertyGroup>
+    <Language>msbuild</Language>
+  </PropertyGroup>
+
+  <ItemGroup>
+    <HelixCorrelationPayload Include="$(CorrelationPayloadDirectory)">
+      <PayloadDirectory>%(Identity)</PayloadDirectory>
+    </HelixCorrelationPayload>
+  </ItemGroup>
+
+  <ItemGroup>
+    <HelixWorkItem Include="WorkItem" Condition="'$(WorkItemDirectory)' != ''">
+      <PayloadDirectory>$(WorkItemDirectory)</PayloadDirectory>
+      <Command>$(WorkItemCommand)</Command>
+      <Timeout>$(WorkItemTimeout)</Timeout>
+    </HelixWorkItem>
+  </ItemGroup>
+
+  <ItemGroup>
+    <XUnitProject Include="$(XUnitProjects.Split(';'))">
+      <Arguments />
+    </XUnitProject>
+  </ItemGroup>
+
+</Project>
diff --git a/eng/common/init-tools-native.cmd b/eng/common/init-tools-native.cmd
new file mode 100644
index 000000000..438cd548c
--- /dev/null
+++ b/eng/common/init-tools-native.cmd
@@ -0,0 +1,3 @@
+@echo off
+powershell -NoProfile -NoLogo -ExecutionPolicy ByPass -command "& """%~dp0init-tools-native.ps1""" %*"
+exit /b %ErrorLevel%
\ No newline at end of file
diff --git a/eng/common/init-tools-native.ps1 b/eng/common/init-tools-native.ps1
new file mode 100644
index 000000000..8cf18bcfe
--- /dev/null
+++ b/eng/common/init-tools-native.ps1
@@ -0,0 +1,147 @@
+<#
+.SYNOPSIS
+Entry point script for installing native tools
+
+.DESCRIPTION
+Reads $RepoRoot\global.json file to determine native assets to install
+and executes installers for those tools
+
+.PARAMETER BaseUri
+Base file directory or Url from which to acquire tool archives
+
+.PARAMETER InstallDirectory
+Directory to install native toolset. This is a command-line override for the default
+Install directory precedence order:
+- InstallDirectory command-line override
+- NETCOREENG_INSTALL_DIRECTORY environment variable
+- (default) %USERPROFILE%/.netcoreeng/native
+
+.PARAMETER Clean
+Switch specifying to not install anything, but cleanup native asset folders
+
+.PARAMETER Force
+Clean and then install tools
+
+.PARAMETER DownloadRetries
+Total number of retry attempts
+
+.PARAMETER RetryWaitTimeInSeconds
+Wait time between retry attempts in seconds
+
+.PARAMETER GlobalJsonFile
+File path to global.json file
+
+.NOTES
+#>
+[CmdletBinding(PositionalBinding=$false)]
+Param (
+ [string] $BaseUri = "https://netcorenativeassets.blob.core.windows.net/resource-packages/external",
+ [string] $InstallDirectory,
+ [switch] $Clean = $False,
+ [switch] $Force = $False,
+ [int] $DownloadRetries = 5,
+ [int] $RetryWaitTimeInSeconds = 30,
+ [string] $GlobalJsonFile
+)
+
+if (!$GlobalJsonFile) {
+ $GlobalJsonFile = Join-Path (Get-Item $PSScriptRoot).Parent.Parent.FullName "global.json"
+}
+
+Set-StrictMode -version 2.0
+$ErrorActionPreference="Stop"
+
+Import-Module -Name (Join-Path $PSScriptRoot "native\CommonLibrary.psm1")
+
+try {
+ # Define verbose switch if undefined
+ $Verbose = $VerbosePreference -Eq "Continue"
+
+ $EngCommonBaseDir = Join-Path $PSScriptRoot "native\"
+ $NativeBaseDir = $InstallDirectory
+ if (!$NativeBaseDir) {
+ $NativeBaseDir = CommonLibrary\Get-NativeInstallDirectory
+ }
+ $Env:CommonLibrary_NativeInstallDir = $NativeBaseDir
+ $InstallBin = Join-Path $NativeBaseDir "bin"
+ $InstallerPath = Join-Path $EngCommonBaseDir "install-tool.ps1"
+
+ # Process tools list
+ Write-Host "Processing $GlobalJsonFile"
+ If (-Not (Test-Path $GlobalJsonFile)) {
+ Write-Host "Unable to find '$GlobalJsonFile'"
+ exit 0
+ }
+ $NativeTools = Get-Content($GlobalJsonFile) -Raw |
+ ConvertFrom-Json |
+ Select-Object -Expand "native-tools" -ErrorAction SilentlyContinue
+ if ($NativeTools) {
+ $NativeTools.PSObject.Properties | ForEach-Object {
+ $ToolName = $_.Name
+ $ToolVersion = $_.Value
+ $LocalInstallerArguments = @{ ToolName = "$ToolName" }
+ $LocalInstallerArguments += @{ InstallPath = "$InstallBin" }
+ $LocalInstallerArguments += @{ BaseUri = "$BaseUri" }
+ $LocalInstallerArguments += @{ CommonLibraryDirectory = "$EngCommonBaseDir" }
+ $LocalInstallerArguments += @{ Version = "$ToolVersion" }
+
+ if ($Verbose) {
+ $LocalInstallerArguments += @{ Verbose = $True }
+ }
+ if (Get-Variable 'Force' -ErrorAction 'SilentlyContinue') {
+ if($Force) {
+ $LocalInstallerArguments += @{ Force = $True }
+ }
+ }
+ if ($Clean) {
+ $LocalInstallerArguments += @{ Clean = $True }
+ }
+
+ Write-Verbose "Installing $ToolName version $ToolVersion"
+ Write-Verbose "Executing '$InstallerPath $($LocalInstallerArguments.Keys.ForEach({"-$_ '$($LocalInstallerArguments.$_)'"}) -join ' ')'"
+ & $InstallerPath @LocalInstallerArguments
+ if ($LASTEXITCODE -Ne "0") {
+ $errMsg = "$ToolName installation failed"
+ if ((Get-Variable 'DoNotAbortNativeToolsInstallationOnFailure' -ErrorAction 'SilentlyContinue') -and $DoNotAbortNativeToolsInstallationOnFailure) {
+ $showNativeToolsWarning = $true
+ if ((Get-Variable 'DoNotDisplayNativeToolsInstallationWarnings' -ErrorAction 'SilentlyContinue') -and $DoNotDisplayNativeToolsInstallationWarnings) {
+ $showNativeToolsWarning = $false
+ }
+ if ($showNativeToolsWarning) {
+ Write-Warning $errMsg
+ }
+ $toolInstallationFailure = $true
+ } else {
+ Write-Error $errMsg
+ exit 1
+ }
+ }
+ }
+
+ if ((Get-Variable 'toolInstallationFailure' -ErrorAction 'SilentlyContinue') -and $toolInstallationFailure) {
+ exit 1
+ }
+ }
+ else {
+ Write-Host "No native tools defined in global.json"
+ exit 0
+ }
+
+ if ($Clean) {
+ exit 0
+ }
+ if (Test-Path $InstallBin) {
+ Write-Host "Native tools are available from" (Convert-Path -Path $InstallBin)
+ Write-Host "##vso[task.prependpath]$(Convert-Path -Path $InstallBin)"
+ }
+ else {
+ Write-Error "Native tools install directory does not exist, installation failed"
+ exit 1
+ }
+ exit 0
+}
+catch {
+ Write-Host $_
+ Write-Host $_.Exception
+ exit 1
+}
diff --git a/eng/common/init-tools-native.sh b/eng/common/init-tools-native.sh
new file mode 100644
index 000000000..4dafaaca1
--- /dev/null
+++ b/eng/common/init-tools-native.sh
@@ -0,0 +1,141 @@
+#!/usr/bin/env bash
+
+source="${BASH_SOURCE[0]}"
+scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+
+base_uri='https://netcorenativeassets.blob.core.windows.net/resource-packages/external'
+install_directory=''
+clean=false
+force=false
+download_retries=5
+retry_wait_time_seconds=30
+global_json_file="$(dirname "$(dirname "${scriptroot}")")/global.json"
+declare -A native_assets
+
+. $scriptroot/native/common-library.sh
+
+while (($# > 0)); do
+ lowerI="$(echo $1 | awk '{print tolower($0)}')"
+ case $lowerI in
+ --baseuri)
+ base_uri=$2
+ shift 2
+ ;;
+ --installdirectory)
+ install_directory=$2
+ shift 2
+ ;;
+ --clean)
+ clean=true
+ shift 1
+ ;;
+ --force)
+ force=true
+ shift 1
+ ;;
+ --downloadretries)
+ download_retries=$2
+ shift 2
+ ;;
+ --retrywaittimeseconds)
+ retry_wait_time_seconds=$2
+ shift 2
+ ;;
+ --help)
+ echo "Common settings:"
+ echo " --installdirectory Directory to install native toolset."
+ echo " This is a command-line override for the default"
+ echo " Install directory precedence order:"
+ echo " - InstallDirectory command-line override"
+ echo " - NETCOREENG_INSTALL_DIRECTORY environment variable"
+ echo " - (default) %USERPROFILE%/.netcoreeng/native"
+ echo ""
+ echo " --clean Switch specifying not to install anything, but cleanup native asset folders"
+ echo " --force Clean and then install tools"
+ echo " --help Print help and exit"
+ echo ""
+ echo "Advanced settings:"
+ echo " --baseuri Base URI for where to download native tools from"
+ echo " --downloadretries Number of times a download should be attempted"
+ echo " --retrywaittimeseconds Wait time between download attempts"
+ echo ""
+ exit 0
+ ;;
+ esac
+done
+
+function ReadGlobalJsonNativeTools {
+ # Get the native-tools section from the global.json.
+ local native_tools_section=$(cat $global_json_file | awk '/"native-tools"/,/}/')
+ # Only extract the contents of the object.
+ local native_tools_list=$(echo $native_tools_section | awk -F"[{}]" '{print $2}')
+ native_tools_list=${native_tools_list//[\" ]/}
+ native_tools_list=$( echo "$native_tools_list" | sed 's/\s//g' | sed 's/,/\n/g' )
+
+ local old_IFS=$IFS
+ while read -r line; do
+ # Lines are of the form: 'tool:version'
+ IFS=:
+ while read -r key value; do
+ native_assets[$key]=$value
+ done <<< "$line"
+ done <<< "$native_tools_list"
+ IFS=$old_IFS
+
+ return 0;
+}
+
+native_base_dir=$install_directory
+if [[ -z $install_directory ]]; then
+ native_base_dir=$(GetNativeInstallDirectory)
+fi
+
+install_bin="${native_base_dir}/bin"
+
+ReadGlobalJsonNativeTools
+
+if [[ ${#native_assets[@]} -eq 0 ]]; then
+ echo "No native tools defined in global.json"
+ exit 0;
+else
+ native_installer_dir="$scriptroot/native"
+ for tool in "${!native_assets[@]}"
+ do
+ tool_version=${native_assets[$tool]}
+ installer_name="install-$tool.sh"
+ installer_command="$native_installer_dir/$installer_name"
+ installer_command+=" --baseuri $base_uri"
+ installer_command+=" --installpath $install_bin"
+ installer_command+=" --version $tool_version"
+ echo $installer_command
+
+ if [[ $force = true ]]; then
+ installer_command+=" --force"
+ fi
+
+ if [[ $clean = true ]]; then
+ installer_command+=" --clean"
+ fi
+
+ $installer_command
+
+ if [[ $? != 0 ]]; then
+ echo "Execution Failed" >&2
+ exit 1
+ fi
+ done
+fi
+
+if [[ $clean = true ]]; then
+ exit 0
+fi
+
+if [[ -d $install_bin ]]; then
+ echo "Native tools are available from $install_bin"
+ echo "##vso[task.prependpath]$install_bin"
+else
+ echo "Native tools install directory does not exist, installation failed" >&2
+ exit 1
+fi
+
+exit 0
diff --git a/eng/common/internal-feed-operations.ps1 b/eng/common/internal-feed-operations.ps1
new file mode 100644
index 000000000..8b8bafd6a
--- /dev/null
+++ b/eng/common/internal-feed-operations.ps1
@@ -0,0 +1,135 @@
+param(
+ [Parameter(Mandatory=$true)][string] $Operation,
+ [string] $AuthToken,
+ [string] $CommitSha,
+ [string] $RepoName,
+ [switch] $IsFeedPrivate
+)
+
+$ErrorActionPreference = "Stop"
+Set-StrictMode -Version 2.0
+
+. $PSScriptRoot\tools.ps1
+
+# Sets VSS_NUGET_EXTERNAL_FEED_ENDPOINTS based on the "darc-int-*" feeds defined in NuGet.config. This is needed
+# in build agents by CredProvider to authenticate the restore requests to internal feeds as specified in
+# https://github.com/microsoft/artifacts-credprovider/blob/0f53327cd12fd893d8627d7b08a2171bf5852a41/README.md#environment-variables. This should ONLY be called from identified
+# internal builds
+function SetupCredProvider {
+ param(
+ [string] $AuthToken
+ )
+
+ # Install the Cred Provider NuGet plugin
+ Write-Host "Setting up Cred Provider NuGet plugin in the agent..."
+ Write-Host "Getting 'installcredprovider.ps1' from 'https://github.com/microsoft/artifacts-credprovider'..."
+
+ $url = 'https://raw.githubusercontent.com/microsoft/artifacts-credprovider/master/helpers/installcredprovider.ps1'
+
+ Write-Host "Writing the contents of 'installcredprovider.ps1' locally..."
+ Invoke-WebRequest $url -OutFile installcredprovider.ps1
+
+ Write-Host "Installing plugin..."
+ .\installcredprovider.ps1 -Force
+
+ Write-Host "Deleting local copy of 'installcredprovider.ps1'..."
+ Remove-Item .\installcredprovider.ps1
+
+ if (-Not (Test-Path "$env:USERPROFILE\.nuget\plugins\netcore")) {
+ Write-Host "CredProvider plugin was not installed correctly!"
+ ExitWithExitCode 1
+ }
+ else {
+ Write-Host "CredProvider plugin was installed correctly!"
+ }
+
+ # Then, we set the 'VSS_NUGET_EXTERNAL_FEED_ENDPOINTS' environment variable to restore from the stable
+ # feeds successfully
+
+ $nugetConfigPath = "$RepoRoot\NuGet.config"
+
+ if (-Not (Test-Path -Path $nugetConfigPath)) {
+ Write-Host "NuGet.config file not found in repo's root!"
+ ExitWithExitCode 1
+ }
+
+ $endpoints = New-Object System.Collections.ArrayList
+ $nugetConfigPackageSources = Select-Xml -Path $nugetConfigPath -XPath "//packageSources/add[contains(@key, 'darc-int-')]/@value" | foreach{$_.Node.Value}
+
+ if (($nugetConfigPackageSources | Measure-Object).Count -gt 0 ) {
+ foreach ($stableRestoreResource in $nugetConfigPackageSources) {
+ $trimmedResource = ([string]$stableRestoreResource).Trim()
+ [void]$endpoints.Add(@{endpoint="$trimmedResource"; password="$AuthToken"})
+ }
+ }
+
+ if (($endpoints | Measure-Object).Count -gt 0) {
+ # Create the JSON object. It should look like '{"endpointCredentials": [{"endpoint":"http://example.index.json", "username":"optional", "password":"accesstoken"}]}'
+ $endpointCredentials = @{endpointCredentials=$endpoints} | ConvertTo-Json -Compress
+
+ # Create the environment variables the AzDo way
+ Write-LoggingCommand -Area 'task' -Event 'setvariable' -Data $endpointCredentials -Properties @{
+ 'variable' = 'VSS_NUGET_EXTERNAL_FEED_ENDPOINTS'
+ 'issecret' = 'false'
+ }
+
+ # We don't want sessions cached since we will be updating the endpoints quite frequently
+ Write-LoggingCommand -Area 'task' -Event 'setvariable' -Data 'False' -Properties @{
+ 'variable' = 'NUGET_CREDENTIALPROVIDER_SESSIONTOKENCACHE_ENABLED'
+ 'issecret' = 'false'
+ }
+ }
+ else
+ {
+ Write-Host "No internal endpoints found in NuGet.config"
+ }
+}
+
+#Workaround for https://github.com/microsoft/msbuild/issues/4430
+function InstallDotNetSdkAndRestoreArcade {
+ $dotnetTempDir = "$RepoRoot\dotnet"
+ $dotnetSdkVersion="2.1.507" # After experimentation we know this version works when restoring the SDK (compared to 3.0.*)
+ $dotnet = "$dotnetTempDir\dotnet.exe"
+ $restoreProjPath = "$PSScriptRoot\restore.proj"
+
+ Write-Host "Installing dotnet SDK version $dotnetSdkVersion to restore Arcade SDK..."
+ InstallDotNetSdk "$dotnetTempDir" "$dotnetSdkVersion"
+
+ '' | Out-File "$restoreProjPath"
+
+ & $dotnet restore $restoreProjPath
+
+ Write-Host "Arcade SDK restored!"
+
+ if (Test-Path -Path $restoreProjPath) {
+ Remove-Item $restoreProjPath
+ }
+
+ if (Test-Path -Path $dotnetTempDir) {
+ Remove-Item $dotnetTempDir -Recurse
+ }
+}
+
+try {
+ Push-Location $PSScriptRoot
+
+ if ($Operation -like "setup") {
+ SetupCredProvider $AuthToken
+ }
+ elseif ($Operation -like "install-restore") {
+ InstallDotNetSdkAndRestoreArcade
+ }
+ else {
+ Write-Host "Unknown operation '$Operation'!"
+ ExitWithExitCode 1
+ }
+}
+catch {
+ Write-Host $_
+ Write-Host $_.Exception
+ Write-Host $_.ScriptStackTrace
+ ExitWithExitCode 1
+}
+finally {
+ Pop-Location
+}
diff --git a/eng/common/internal-feed-operations.sh b/eng/common/internal-feed-operations.sh
new file mode 100644
index 000000000..1ff654d2f
--- /dev/null
+++ b/eng/common/internal-feed-operations.sh
@@ -0,0 +1,142 @@
+#!/usr/bin/env bash
+
+set -e
+
+# Sets VSS_NUGET_EXTERNAL_FEED_ENDPOINTS based on the "darc-int-*" feeds defined in NuGet.config. This is needed
+# in build agents by CredProvider to authenticate the restore requests to internal feeds as specified in
+# https://github.com/microsoft/artifacts-credprovider/blob/0f53327cd12fd893d8627d7b08a2171bf5852a41/README.md#environment-variables.
+# This should ONLY be called from identified internal builds
+function SetupCredProvider {
+ local authToken=$1
+
+ # Install the Cred Provider NuGet plugin
+ echo "Setting up Cred Provider NuGet plugin in the agent..."
+ echo "Getting 'installcredprovider.sh' from 'https://github.com/microsoft/artifacts-credprovider'..."
+
+ local url="https://raw.githubusercontent.com/microsoft/artifacts-credprovider/master/helpers/installcredprovider.sh"
+
+ echo "Writing the contents of 'installcredprovider.sh' locally..."
+ local installcredproviderPath="installcredprovider.sh"
+ if command -v curl > /dev/null; then
+ curl $url > "$installcredproviderPath"
+ else
+ wget -q -O "$installcredproviderPath" "$url"
+ fi
+
+ echo "Installing plugin..."
+ . "$installcredproviderPath"
+
+ echo "Deleting local copy of 'installcredprovider.sh'..."
+ rm installcredprovider.sh
+
+ if [ ! -d "$HOME/.nuget/plugins" ]; then
+ echo "CredProvider plugin was not installed correctly!"
+ ExitWithExitCode 1
+ else
+ echo "CredProvider plugin was installed correctly!"
+ fi
+
+ # Then, we set the 'VSS_NUGET_EXTERNAL_FEED_ENDPOINTS' environment variable to restore from the stable
+ # feeds successfully
+
+ local nugetConfigPath="$repo_root/NuGet.config"
+
+ if [ ! -f "$nugetConfigPath" ]; then
+ echo "NuGet.config file not found in repo's root!"
+ ExitWithExitCode 1
+ fi
+
+ local endpoints='['
+ local nugetConfigPackageValues=`cat "$nugetConfigPath" | grep "key=\"darc-int-"`
+ local pattern="value=\"(.*)\""
+
+ for value in $nugetConfigPackageValues
+ do
+ if [[ $value =~ $pattern ]]; then
+ local endpoint="${BASH_REMATCH[1]}"
+ endpoints+="{\"endpoint\": \"$endpoint\", \"password\": \"$authToken\"},"
+ fi
+ done
+
+ endpoints=${endpoints%?}
+ endpoints+=']'
+
+ if [ ${#endpoints} -gt 2 ]; then
+ # Create the JSON object. It should look like '{"endpointCredentials": [{"endpoint":"http://example.index.json", "username":"optional", "password":"accesstoken"}]}'
+ local endpointCredentials="{\"endpointCredentials\": "$endpoints"}"
+
+ echo "##vso[task.setvariable variable=VSS_NUGET_EXTERNAL_FEED_ENDPOINTS]$endpointCredentials"
+ echo "##vso[task.setvariable variable=NUGET_CREDENTIALPROVIDER_SESSIONTOKENCACHE_ENABLED]False"
+ else
+ echo "No internal endpoints found in NuGet.config"
+ fi
+}
+
+# Workaround for https://github.com/microsoft/msbuild/issues/4430
+function InstallDotNetSdkAndRestoreArcade {
+ local dotnetTempDir="$repo_root/dotnet"
+ local dotnetSdkVersion="2.1.507" # After experimentation we know this version works when restoring the SDK (compared to 3.0.*)
+ local restoreProjPath="$repo_root/eng/common/restore.proj"
+
+ echo "Installing dotnet SDK version $dotnetSdkVersion to restore Arcade SDK..."
+ echo "" > "$restoreProjPath"
+
+ InstallDotNetSdk "$dotnetTempDir" "$dotnetSdkVersion"
+
+ local res=`$dotnetTempDir/dotnet restore $restoreProjPath`
+ echo "Arcade SDK restored!"
+
+ # Cleanup
+ if [ "$restoreProjPath" ]; then
+ rm "$restoreProjPath"
+ fi
+
+ if [ "$dotnetTempDir" ]; then
+ rm -r $dotnetTempDir
+ fi
+}
+
+source="${BASH_SOURCE[0]}"
+operation=''
+authToken=''
+repoName=''
+
+while [[ $# > 0 ]]; do
+ opt="$(echo "$1" | awk '{print tolower($0)}')"
+ case "$opt" in
+ --operation)
+ operation=$2
+ shift
+ ;;
+ --authtoken)
+ authToken=$2
+ shift
+ ;;
+ *)
+ echo "Invalid argument: $1"
+ usage
+ exit 1
+ ;;
+ esac
+
+ shift
+done
+
+while [[ -h "$source" ]]; do
+ scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+ source="$(readlink "$source")"
+ # if $source was a relative symlink, we need to resolve it relative to the path where the
+ # symlink file was located
+ [[ $source != /* ]] && source="$scriptroot/$source"
+done
+scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+
+. "$scriptroot/tools.sh"
+
+if [ "$operation" = "setup" ]; then
+ SetupCredProvider $authToken
+elif [ "$operation" = "install-restore" ]; then
+ InstallDotNetSdkAndRestoreArcade
+else
+ echo "Unknown operation '$operation'!"
+fi
diff --git a/eng/common/internal/Directory.Build.props b/eng/common/internal/Directory.Build.props
new file mode 100644
index 000000000..e33179ef3
--- /dev/null
+++ b/eng/common/internal/Directory.Build.props
@@ -0,0 +1,4 @@
+
+
+
+
diff --git a/eng/common/internal/Tools.csproj b/eng/common/internal/Tools.csproj
new file mode 100644
index 000000000..1a39a7ef3
--- /dev/null
+++ b/eng/common/internal/Tools.csproj
@@ -0,0 +1,27 @@
+
+
+
+
+ net472
+ false
+
+
+
+
+
+
+
+
+
+
+ https://devdiv.pkgs.visualstudio.com/_packaging/dotnet-core-internal-tooling/nuget/v3/index.json;
+
+
+ $(RestoreSources);
+ https://devdiv.pkgs.visualstudio.com/_packaging/VS/nuget/v3/index.json;
+
+
+
+
+
+
diff --git a/eng/common/msbuild.ps1 b/eng/common/msbuild.ps1
new file mode 100644
index 000000000..b37fd3d5e
--- /dev/null
+++ b/eng/common/msbuild.ps1
@@ -0,0 +1,27 @@
+[CmdletBinding(PositionalBinding=$false)]
+Param(
+ [string] $verbosity = "minimal",
+ [bool] $warnAsError = $true,
+ [bool] $nodeReuse = $true,
+ [switch] $ci,
+ [switch] $prepareMachine,
+ [Parameter(ValueFromRemainingArguments=$true)][String[]]$extraArgs
+)
+
+. $PSScriptRoot\tools.ps1
+
+try {
+ if ($ci) {
+ $nodeReuse = $false
+ }
+
+ MSBuild @extraArgs
+}
+catch {
+ Write-Host $_
+ Write-Host $_.Exception
+ Write-Host $_.ScriptStackTrace
+ ExitWithExitCode 1
+}
+
+ExitWithExitCode 0
\ No newline at end of file
diff --git a/eng/common/msbuild.sh b/eng/common/msbuild.sh
new file mode 100644
index 000000000..8160cd5a5
--- /dev/null
+++ b/eng/common/msbuild.sh
@@ -0,0 +1,58 @@
+#!/usr/bin/env bash
+
+source="${BASH_SOURCE[0]}"
+
+# resolve $source until the file is no longer a symlink
+while [[ -h "$source" ]]; do
+ scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+ source="$(readlink "$source")"
+ # if $source was a relative symlink, we need to resolve it relative to the path where the
+ # symlink file was located
+ [[ $source != /* ]] && source="$scriptroot/$source"
+done
+scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+
+verbosity='minimal'
+warn_as_error=true
+node_reuse=true
+prepare_machine=false
+extra_args=''
+
+while (($# > 0)); do
+ lowerI="$(echo $1 | awk '{print tolower($0)}')"
+ case $lowerI in
+ --verbosity)
+ verbosity=$2
+ shift 2
+ ;;
+ --warnaserror)
+ warn_as_error=$2
+ shift 2
+ ;;
+ --nodereuse)
+ node_reuse=$2
+ shift 2
+ ;;
+ --ci)
+ ci=true
+ shift 1
+ ;;
+ --preparemachine)
+ prepare_machine=true
+ shift 1
+ ;;
+ *)
+ extra_args="$extra_args $1"
+ shift 1
+ ;;
+ esac
+done
+
+. "$scriptroot/tools.sh"
+
+if [[ "$ci" == true ]]; then
+ node_reuse=false
+fi
+
+MSBuild $extra_args
+ExitWithExitCode 0
diff --git a/eng/common/native/CommonLibrary.psm1 b/eng/common/native/CommonLibrary.psm1
new file mode 100644
index 000000000..41416862d
--- /dev/null
+++ b/eng/common/native/CommonLibrary.psm1
@@ -0,0 +1,389 @@
+<#
+.SYNOPSIS
+Helper module to install an archive to a directory
+
+.DESCRIPTION
+Helper module to download and extract an archive to a specified directory
+
+.PARAMETER Uri
+Uri of artifact to download
+
+.PARAMETER InstallDirectory
+Directory to extract artifact contents to
+
+.PARAMETER Force
+Force download / extraction if file or contents already exist. Default = False
+
+.PARAMETER DownloadRetries
+Total number of retry attempts. Default = 5
+
+.PARAMETER RetryWaitTimeInSeconds
+Wait time between retry attempts in seconds. Default = 30
+
+.NOTES
+Returns False if download or extraction fail, True otherwise
+#>
+function DownloadAndExtract {
+ [CmdletBinding(PositionalBinding=$false)]
+ Param (
+ [Parameter(Mandatory=$True)]
+ [string] $Uri,
+ [Parameter(Mandatory=$True)]
+ [string] $InstallDirectory,
+ [switch] $Force = $False,
+ [int] $DownloadRetries = 5,
+ [int] $RetryWaitTimeInSeconds = 30
+ )
+ # Define verbose switch if undefined
+ $Verbose = $VerbosePreference -Eq "Continue"
+
+ $TempToolPath = CommonLibrary\Get-TempPathFilename -Path $Uri
+
+ # Download native tool
+ $DownloadStatus = CommonLibrary\Get-File -Uri $Uri `
+ -Path $TempToolPath `
+ -DownloadRetries $DownloadRetries `
+ -RetryWaitTimeInSeconds $RetryWaitTimeInSeconds `
+ -Force:$Force `
+ -Verbose:$Verbose
+
+ if ($DownloadStatus -Eq $False) {
+ Write-Error "Download failed"
+ return $False
+ }
+
+ # Extract native tool
+ $UnzipStatus = CommonLibrary\Expand-Zip -ZipPath $TempToolPath `
+ -OutputDirectory $InstallDirectory `
+ -Force:$Force `
+ -Verbose:$Verbose
+
+ if ($UnzipStatus -Eq $False) {
+ # Retry Download one more time with Force=true
+ $DownloadRetryStatus = CommonLibrary\Get-File -Uri $Uri `
+ -Path $TempToolPath `
+ -DownloadRetries 1 `
+ -RetryWaitTimeInSeconds $RetryWaitTimeInSeconds `
+ -Force:$True `
+ -Verbose:$Verbose
+
+ if ($DownloadRetryStatus -Eq $False) {
+ Write-Error "Last attempt of download failed as well"
+ return $False
+ }
+
+ # Retry unzip again one more time with Force=true
+ $UnzipRetryStatus = CommonLibrary\Expand-Zip -ZipPath $TempToolPath `
+ -OutputDirectory $InstallDirectory `
+ -Force:$True `
+ -Verbose:$Verbose
+ if ($UnzipRetryStatus -Eq $False)
+ {
+ Write-Error "Last attempt of unzip failed as well"
+ # Clean up partial zips and extracts
+ if (Test-Path $TempToolPath) {
+ Remove-Item $TempToolPath -Force
+ }
+ if (Test-Path $InstallDirectory) {
+ Remove-Item $InstallDirectory -Force -Recurse
+ }
+ return $False
+ }
+ }
+
+ return $True
+}
+
+<#
+.SYNOPSIS
+Download a file, retry on failure
+
+.DESCRIPTION
+Download specified file and retry if attempt fails
+
+.PARAMETER Uri
+Uri of file to download. If Uri is a local path, the file will be copied instead of downloaded
+
+.PARAMETER Path
+Path to download or copy uri file to
+
+.PARAMETER Force
+Overwrite existing file if present. Default = False
+
+.PARAMETER DownloadRetries
+Total number of retry attempts. Default = 5
+
+.PARAMETER RetryWaitTimeInSeconds
+Wait time between retry attempts in seconds Default = 30
+
+#>
+function Get-File {
+ [CmdletBinding(PositionalBinding=$false)]
+ Param (
+ [Parameter(Mandatory=$True)]
+ [string] $Uri,
+ [Parameter(Mandatory=$True)]
+ [string] $Path,
+ [int] $DownloadRetries = 5,
+ [int] $RetryWaitTimeInSeconds = 30,
+ [switch] $Force = $False
+ )
+ $Attempt = 0
+
+ if ($Force) {
+ if (Test-Path $Path) {
+ Remove-Item $Path -Force
+ }
+ }
+ if (Test-Path $Path) {
+ Write-Host "File '$Path' already exists, skipping download"
+ return $True
+ }
+
+ $DownloadDirectory = Split-Path -ErrorAction Ignore -Path "$Path" -Parent
+ if (-Not (Test-Path $DownloadDirectory)) {
+ New-Item -path $DownloadDirectory -force -itemType "Directory" | Out-Null
+ }
+
+ if (Test-Path -IsValid -Path $Uri) {
+ Write-Verbose "'$Uri' is a file path, copying file to '$Path'"
+ Copy-Item -Path $Uri -Destination $Path
+ return $?
+ }
+ else {
+ Write-Verbose "Downloading $Uri"
+ # Don't display the console progress UI - it's a huge perf hit
+ $ProgressPreference = 'SilentlyContinue'
+ while($Attempt -Lt $DownloadRetries)
+ {
+ try {
+ Invoke-WebRequest -UseBasicParsing -Uri $Uri -OutFile $Path
+ Write-Verbose "Downloaded to '$Path'"
+ return $True
+ }
+ catch {
+ $Attempt++
+ if ($Attempt -Lt $DownloadRetries) {
+ $AttemptsLeft = $DownloadRetries - $Attempt
+ Write-Warning "Download failed, $AttemptsLeft attempts remaining, will retry in $RetryWaitTimeInSeconds seconds"
+ Start-Sleep -Seconds $RetryWaitTimeInSeconds
+ }
+ else {
+ Write-Error $_
+ Write-Error $_.Exception
+ }
+ }
+ }
+ }
+
+ return $False
+}
+
+<#
+.SYNOPSIS
+Generate a shim for a native tool
+
+.DESCRIPTION
+Creates a wrapper script (shim) that passes arguments forward to native tool assembly
+
+.PARAMETER ShimName
+The name of the shim
+
+.PARAMETER ShimDirectory
+The directory where shims are stored
+
+.PARAMETER ToolFilePath
+Path to file that shim forwards to
+
+.PARAMETER Force
+Replace shim if already present. Default = False
+
+.NOTES
+Returns $True if generating shim succeeds, $False otherwise
+#>
+function New-ScriptShim {
+ [CmdletBinding(PositionalBinding=$false)]
+ Param (
+ [Parameter(Mandatory=$True)]
+ [string] $ShimName,
+ [Parameter(Mandatory=$True)]
+ [string] $ShimDirectory,
+ [Parameter(Mandatory=$True)]
+ [string] $ToolFilePath,
+ [Parameter(Mandatory=$True)]
+ [string] $BaseUri,
+ [switch] $Force
+ )
+ try {
+ Write-Verbose "Generating '$ShimName' shim"
+
+ if (-Not (Test-Path $ToolFilePath)){
+ Write-Error "Specified tool file path '$ToolFilePath' does not exist"
+ return $False
+ }
+
+ # WinShimmer is a small .NET Framework program that creates .exe shims to bootstrapped programs
+ # Many of the checks for installed programs expect a .exe extension for Windows tools, rather
+ # than a .bat or .cmd file.
+ # Source: https://github.com/dotnet/arcade/tree/master/src/WinShimmer
+ if (-Not (Test-Path "$ShimDirectory\WinShimmer\winshimmer.exe")) {
+ $InstallStatus = DownloadAndExtract -Uri "$BaseUri/windows/winshimmer/WinShimmer.zip" `
+ -InstallDirectory $ShimDirectory\WinShimmer `
+ -Force:$Force `
+ -DownloadRetries 2 `
+ -RetryWaitTimeInSeconds 5 `
+ -Verbose:$Verbose
+ }
+
+ if ((Test-Path (Join-Path $ShimDirectory "$ShimName.exe"))) {
+ Write-Host "$ShimName.exe already exists; replacing..."
+ Remove-Item (Join-Path $ShimDirectory "$ShimName.exe")
+ }
+
+ & "$ShimDirectory\WinShimmer\winshimmer.exe" $ShimName $ToolFilePath $ShimDirectory
+ return $True
+ }
+ catch {
+ Write-Host $_
+ Write-Host $_.Exception
+ return $False
+ }
+}
+
+<#
+.SYNOPSIS
+Returns the machine architecture of the host machine
+
+.NOTES
+Returns 'x64' on 64 bit machines
+ Returns 'x86' on 32 bit machines
+#>
+function Get-MachineArchitecture {
+ $ProcessorArchitecture = $Env:PROCESSOR_ARCHITECTURE
+ $ProcessorArchitectureW6432 = $Env:PROCESSOR_ARCHITEW6432
+ if($ProcessorArchitecture -Eq "X86")
+ {
+ if(($ProcessorArchitectureW6432 -Eq "") -Or
+ ($ProcessorArchitectureW6432 -Eq "X86")) {
+ return "x86"
+ }
+ $ProcessorArchitecture = $ProcessorArchitectureW6432
+ }
+ if (($ProcessorArchitecture -Eq "AMD64") -Or
+ ($ProcessorArchitecture -Eq "IA64") -Or
+ ($ProcessorArchitecture -Eq "ARM64")) {
+ return "x64"
+ }
+ return "x86"
+}
+
+<#
+.SYNOPSIS
+Get the name of a temporary folder under the native install directory
+#>
+function Get-TempDirectory {
+ return Join-Path (Get-NativeInstallDirectory) "temp/"
+}
+
+function Get-TempPathFilename {
+ [CmdletBinding(PositionalBinding=$false)]
+ Param (
+ [Parameter(Mandatory=$True)]
+ [string] $Path
+ )
+ $TempDir = CommonLibrary\Get-TempDirectory
+ $TempFilename = Split-Path $Path -leaf
+ $TempPath = Join-Path $TempDir $TempFilename
+ return $TempPath
+}
+
+<#
+.SYNOPSIS
+Returns the base directory to use for native tool installation
+
+.NOTES
+Returns the value of the NETCOREENG_INSTALL_DIRECTORY if that environment variable
+is set, or otherwise returns an install directory under the %USERPROFILE%
+#>
+function Get-NativeInstallDirectory {
+ $InstallDir = $Env:NETCOREENG_INSTALL_DIRECTORY
+ if (!$InstallDir) {
+ $InstallDir = Join-Path $Env:USERPROFILE ".netcoreeng/native/"
+ }
+ return $InstallDir
+}
+
+<#
+.SYNOPSIS
+Unzip an archive
+
+.DESCRIPTION
+Powershell module to unzip an archive to a specified directory
+
+.PARAMETER ZipPath (Required)
+Path to archive to unzip
+
+.PARAMETER OutputDirectory (Required)
+Output directory for archive contents
+
+.PARAMETER Force
+Overwrite output directory contents if they already exist
+
+.NOTES
+- Returns True and does not perform an extraction if output directory already exists but Overwrite is not True.
+- Returns True if unzip operation is successful
+- Returns False if Overwrite is True and it is unable to remove contents of OutputDirectory
+- Returns False if unable to extract zip archive
+#>
+function Expand-Zip {
+ [CmdletBinding(PositionalBinding=$false)]
+ Param (
+ [Parameter(Mandatory=$True)]
+ [string] $ZipPath,
+ [Parameter(Mandatory=$True)]
+ [string] $OutputDirectory,
+ [switch] $Force
+ )
+
+ Write-Verbose "Extracting '$ZipPath' to '$OutputDirectory'"
+ try {
+ if ((Test-Path $OutputDirectory) -And (-Not $Force)) {
+ Write-Host "Directory '$OutputDirectory' already exists, skipping extract"
+ return $True
+ }
+ if (Test-Path $OutputDirectory) {
+ Write-Verbose "'Force' is 'True', but '$OutputDirectory' exists, removing directory"
+ Remove-Item $OutputDirectory -Force -Recurse
+ if ($? -Eq $False) {
+ Write-Error "Unable to remove '$OutputDirectory'"
+ return $False
+ }
+ }
+ if (-Not (Test-Path $OutputDirectory)) {
+ New-Item -path $OutputDirectory -Force -itemType "Directory" | Out-Null
+ }
+
+ Add-Type -assembly "system.io.compression.filesystem"
+ [io.compression.zipfile]::ExtractToDirectory("$ZipPath", "$OutputDirectory")
+ if ($? -Eq $False) {
+ Write-Error "Unable to extract '$ZipPath'"
+ return $False
+ }
+ }
+ catch {
+ Write-Host $_
+ Write-Host $_.Exception
+
+ return $False
+ }
+ return $True
+}
+
+export-modulemember -function DownloadAndExtract
+export-modulemember -function Expand-Zip
+export-modulemember -function Get-File
+export-modulemember -function Get-MachineArchitecture
+export-modulemember -function Get-NativeInstallDirectory
+export-modulemember -function Get-TempDirectory
+export-modulemember -function Get-TempPathFilename
+export-modulemember -function New-ScriptShim
diff --git a/eng/common/native/common-library.sh b/eng/common/native/common-library.sh
new file mode 100644
index 000000000..271bddfac
--- /dev/null
+++ b/eng/common/native/common-library.sh
@@ -0,0 +1,168 @@
+#!/usr/bin/env bash
+
+function GetNativeInstallDirectory {
+ local install_dir
+
+ if [[ -z $NETCOREENG_INSTALL_DIRECTORY ]]; then
+ install_dir=$HOME/.netcoreeng/native/
+ else
+ install_dir=$NETCOREENG_INSTALL_DIRECTORY
+ fi
+
+ echo $install_dir
+ return 0
+}
+
+function GetTempDirectory {
+
+ echo $(GetNativeInstallDirectory)temp/
+ return 0
+}
+
+function ExpandZip {
+ local zip_path=$1
+ local output_directory=$2
+ local force=${3:-false}
+
+ echo "Extracting $zip_path to $output_directory"
+ if [[ -d $output_directory ]] && [[ $force = false ]]; then
+ echo "Directory '$output_directory' already exists, skipping extract"
+ return 0
+ fi
+
+ if [[ -d $output_directory ]]; then
+    echo "Force flag enabled, but '$output_directory' exists. Removing directory"
+ rm -rf $output_directory
+ if [[ $? != 0 ]]; then
+      echo "Unable to remove '$output_directory'" >&2
+ return 1
+ fi
+ fi
+
+ echo "Creating directory: '$output_directory'"
+ mkdir -p $output_directory
+
+ echo "Extracting archive"
+ tar -xf $zip_path -C $output_directory
+ if [[ $? != 0 ]]; then
+ echo "Unable to extract '$zip_path'" >&2
+ return 1
+ fi
+
+ return 0
+}
+
+function GetCurrentOS {
+ local unameOut="$(uname -s)"
+ case $unameOut in
+ Linux*) echo "Linux";;
+ Darwin*) echo "MacOS";;
+ esac
+ return 0
+}
+
+function GetFile {
+ local uri=$1
+ local path=$2
+ local force=${3:-false}
+ local download_retries=${4:-5}
+ local retry_wait_time_seconds=${5:-30}
+
+ if [[ -f $path ]]; then
+ if [[ $force = false ]]; then
+ echo "File '$path' already exists. Skipping download"
+ return 0
+ else
+ rm -rf $path
+ fi
+ fi
+
+ if [[ -f $uri ]]; then
+ echo "'$uri' is a file path, copying file to '$path'"
+ cp $uri $path
+ return $?
+ fi
+
+ echo "Downloading $uri"
+ # Use curl if available, otherwise use wget
+ if command -v curl > /dev/null; then
+ curl "$uri" -sSL --retry $download_retries --retry-delay $retry_wait_time_seconds --create-dirs -o "$path" --fail
+ else
+ wget -q -O "$path" "$uri" --tries="$download_retries"
+ fi
+
+ return $?
+}
+
+function GetTempPathFileName {
+ local path=$1
+
+ local temp_dir=$(GetTempDirectory)
+ local temp_file_name=$(basename $path)
+ echo $temp_dir$temp_file_name
+ return 0
+}
+
+function DownloadAndExtract {
+ local uri=$1
+ local installDir=$2
+ local force=${3:-false}
+ local download_retries=${4:-5}
+ local retry_wait_time_seconds=${5:-30}
+
+ local temp_tool_path=$(GetTempPathFileName $uri)
+
+ echo "downloading to: $temp_tool_path"
+
+ # Download file
+ GetFile "$uri" "$temp_tool_path" $force $download_retries $retry_wait_time_seconds
+ if [[ $? != 0 ]]; then
+ echo "Failed to download '$uri' to '$temp_tool_path'." >&2
+ return 1
+ fi
+
+ # Extract File
+ echo "extracting from $temp_tool_path to $installDir"
+ ExpandZip "$temp_tool_path" "$installDir" $force $download_retries $retry_wait_time_seconds
+ if [[ $? != 0 ]]; then
+ echo "Failed to extract '$temp_tool_path' to '$installDir'." >&2
+ return 1
+ fi
+
+ return 0
+}
+
+function NewScriptShim {
+ local shimpath=$1
+ local tool_file_path=$2
+ local force=${3:-false}
+
+ echo "Generating '$shimpath' shim"
+ if [[ -f $shimpath ]]; then
+ if [[ $force = false ]]; then
+ echo "File '$shimpath' already exists." >&2
+ return 1
+ else
+ rm -rf $shimpath
+ fi
+ fi
+
+ if [[ ! -f $tool_file_path ]]; then
+ echo "Specified tool file path:'$tool_file_path' does not exist" >&2
+ return 1
+ fi
+
+ local shim_contents=$'#!/usr/bin/env bash\n'
+ shim_contents+="SHIMARGS="$'$1\n'
+ shim_contents+="$tool_file_path"$' $SHIMARGS\n'
+
+ # Write shim file
+ echo "$shim_contents" > $shimpath
+
+ chmod +x $shimpath
+
+ echo "Finished generating shim '$shimpath'"
+
+ return $?
+}
+
diff --git a/eng/common/native/install-cmake-test.sh b/eng/common/native/install-cmake-test.sh
new file mode 100644
index 000000000..53ddf4e68
--- /dev/null
+++ b/eng/common/native/install-cmake-test.sh
@@ -0,0 +1,117 @@
+#!/usr/bin/env bash
+
+source="${BASH_SOURCE[0]}"
+scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+
+. $scriptroot/common-library.sh
+
+base_uri=
+install_path=
+version=
+clean=false
+force=false
+download_retries=5
+retry_wait_time_seconds=30
+
+while (($# > 0)); do
+ lowerI="$(echo $1 | awk '{print tolower($0)}')"
+ case $lowerI in
+ --baseuri)
+ base_uri=$2
+ shift 2
+ ;;
+ --installpath)
+ install_path=$2
+ shift 2
+ ;;
+ --version)
+ version=$2
+ shift 2
+ ;;
+ --clean)
+ clean=true
+ shift 1
+ ;;
+ --force)
+ force=true
+ shift 1
+ ;;
+ --downloadretries)
+ download_retries=$2
+ shift 2
+ ;;
+ --retrywaittimeseconds)
+ retry_wait_time_seconds=$2
+ shift 2
+ ;;
+ --help)
+ echo "Common settings:"
+ echo " --baseuri Base file directory or Url wrom which to acquire tool archives"
+ echo " --installpath Base directory to install native tool to"
+ echo " --clean Don't install the tool, just clean up the current install of the tool"
+ echo " --force Force install of tools even if they previously exist"
+ echo " --help Print help and exit"
+ echo ""
+ echo "Advanced settings:"
+ echo " --downloadretries Total number of retry attempts"
+ echo " --retrywaittimeseconds Wait time between retry attempts in seconds"
+ echo ""
+ exit 0
+ ;;
+ esac
+done
+
+tool_name="cmake-test"
+tool_os=$(GetCurrentOS)
+tool_folder=$(echo $tool_os | awk '{print tolower($0)}')
+tool_arch="x86_64"
+tool_name_moniker="$tool_name-$version-$tool_os-$tool_arch"
+tool_install_directory="$install_path/$tool_name/$version"
+tool_file_path="$tool_install_directory/$tool_name_moniker/bin/$tool_name"
+shim_path="$install_path/$tool_name.sh"
+uri="${base_uri}/$tool_folder/$tool_name/$tool_name_moniker.tar.gz"
+
+# Clean up tool and installers
+if [[ $clean = true ]]; then
+ echo "Cleaning $tool_install_directory"
+ if [[ -d $tool_install_directory ]]; then
+ rm -rf $tool_install_directory
+ fi
+
+ echo "Cleaning $shim_path"
+ if [[ -f $shim_path ]]; then
+ rm -rf $shim_path
+ fi
+
+ tool_temp_path=$(GetTempPathFileName $uri)
+ echo "Cleaning $tool_temp_path"
+ if [[ -f $tool_temp_path ]]; then
+ rm -rf $tool_temp_path
+ fi
+
+ exit 0
+fi
+
+# Install tool
+if [[ -f $tool_file_path ]] && [[ $force = false ]]; then
+ echo "$tool_name ($version) already exists, skipping install"
+ exit 0
+fi
+
+DownloadAndExtract $uri $tool_install_directory $force $download_retries $retry_wait_time_seconds
+
+if [[ $? != 0 ]]; then
+ echo "Installation failed" >&2
+ exit 1
+fi
+
+# Generate Shim
+# Always rewrite shims so that we are referencing the expected version
+NewScriptShim $shim_path $tool_file_path true
+
+if [[ $? != 0 ]]; then
+ echo "Shim generation failed" >&2
+ exit 1
+fi
+
+exit 0
\ No newline at end of file
diff --git a/eng/common/native/install-cmake.sh b/eng/common/native/install-cmake.sh
new file mode 100644
index 000000000..5f1a182fa
--- /dev/null
+++ b/eng/common/native/install-cmake.sh
@@ -0,0 +1,117 @@
+#!/usr/bin/env bash
+
+source="${BASH_SOURCE[0]}"
+scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+
+. $scriptroot/common-library.sh
+
+base_uri=
+install_path=
+version=
+clean=false
+force=false
+download_retries=5
+retry_wait_time_seconds=30
+
+while (($# > 0)); do
+ lowerI="$(echo $1 | awk '{print tolower($0)}')"
+ case $lowerI in
+ --baseuri)
+ base_uri=$2
+ shift 2
+ ;;
+ --installpath)
+ install_path=$2
+ shift 2
+ ;;
+ --version)
+ version=$2
+ shift 2
+ ;;
+ --clean)
+ clean=true
+ shift 1
+ ;;
+ --force)
+ force=true
+ shift 1
+ ;;
+ --downloadretries)
+ download_retries=$2
+ shift 2
+ ;;
+ --retrywaittimeseconds)
+ retry_wait_time_seconds=$2
+ shift 2
+ ;;
+ --help)
+ echo "Common settings:"
+ echo " --baseuri Base file directory or Url wrom which to acquire tool archives"
+ echo " --installpath Base directory to install native tool to"
+ echo " --clean Don't install the tool, just clean up the current install of the tool"
+ echo " --force Force install of tools even if they previously exist"
+ echo " --help Print help and exit"
+ echo ""
+ echo "Advanced settings:"
+ echo " --downloadretries Total number of retry attempts"
+ echo " --retrywaittimeseconds Wait time between retry attempts in seconds"
+ echo ""
+ exit 0
+ ;;
+ esac
+done
+
+tool_name="cmake"
+tool_os=$(GetCurrentOS)
+tool_folder=$(echo $tool_os | awk '{print tolower($0)}')
+tool_arch="x86_64"
+tool_name_moniker="$tool_name-$version-$tool_os-$tool_arch"
+tool_install_directory="$install_path/$tool_name/$version"
+tool_file_path="$tool_install_directory/$tool_name_moniker/bin/$tool_name"
+shim_path="$install_path/$tool_name.sh"
+uri="${base_uri}/$tool_folder/$tool_name/$tool_name_moniker.tar.gz"
+
+# Clean up tool and installers
+if [[ $clean = true ]]; then
+ echo "Cleaning $tool_install_directory"
+ if [[ -d $tool_install_directory ]]; then
+ rm -rf $tool_install_directory
+ fi
+
+ echo "Cleaning $shim_path"
+ if [[ -f $shim_path ]]; then
+ rm -rf $shim_path
+ fi
+
+ tool_temp_path=$(GetTempPathFileName $uri)
+ echo "Cleaning $tool_temp_path"
+ if [[ -f $tool_temp_path ]]; then
+ rm -rf $tool_temp_path
+ fi
+
+ exit 0
+fi
+
+# Install tool
+if [[ -f $tool_file_path ]] && [[ $force = false ]]; then
+ echo "$tool_name ($version) already exists, skipping install"
+ exit 0
+fi
+
+DownloadAndExtract $uri $tool_install_directory $force $download_retries $retry_wait_time_seconds
+
+if [[ $? != 0 ]]; then
+ echo "Installation failed" >&2
+ exit 1
+fi
+
+# Generate Shim
+# Always rewrite shims so that we are referencing the expected version
+NewScriptShim $shim_path $tool_file_path true
+
+if [[ $? != 0 ]]; then
+ echo "Shim generation failed" >&2
+ exit 1
+fi
+
+exit 0
\ No newline at end of file
diff --git a/eng/common/native/install-tool.ps1 b/eng/common/native/install-tool.ps1
new file mode 100644
index 000000000..635ab3fd4
--- /dev/null
+++ b/eng/common/native/install-tool.ps1
@@ -0,0 +1,130 @@
+<#
+.SYNOPSIS
+Install native tool
+
+.DESCRIPTION
+Install cmake native tool from Azure blob storage
+
+.PARAMETER InstallPath
+Base directory to install native tool to
+
+.PARAMETER BaseUri
+Base file directory or Url from which to acquire tool archives
+
+.PARAMETER CommonLibraryDirectory
+Path to folder containing common library modules
+
+.PARAMETER Force
+Force install of tools even if they previously exist
+
+.PARAMETER Clean
+Don't install the tool, just clean up the current install of the tool
+
+.PARAMETER DownloadRetries
+Total number of retry attempts
+
+.PARAMETER RetryWaitTimeInSeconds
+Wait time between retry attempts in seconds
+
+.NOTES
+Returns 0 if install succeeds, 1 otherwise
+#>
+[CmdletBinding(PositionalBinding=$false)]
+Param (
+ [Parameter(Mandatory=$True)]
+ [string] $ToolName,
+ [Parameter(Mandatory=$True)]
+ [string] $InstallPath,
+ [Parameter(Mandatory=$True)]
+ [string] $BaseUri,
+ [Parameter(Mandatory=$True)]
+ [string] $Version,
+ [string] $CommonLibraryDirectory = $PSScriptRoot,
+ [switch] $Force = $False,
+ [switch] $Clean = $False,
+ [int] $DownloadRetries = 5,
+ [int] $RetryWaitTimeInSeconds = 30
+)
+
+# Import common library modules
+Import-Module -Name (Join-Path $CommonLibraryDirectory "CommonLibrary.psm1")
+
+try {
+ # Define verbose switch if undefined
+ $Verbose = $VerbosePreference -Eq "Continue"
+
+ $Arch = CommonLibrary\Get-MachineArchitecture
+ $ToolOs = "win64"
+ if($Arch -Eq "x32") {
+ $ToolOs = "win32"
+ }
+ $ToolNameMoniker = "$ToolName-$Version-$ToolOs-$Arch"
+ $ToolInstallDirectory = Join-Path $InstallPath "$ToolName\$Version\"
+ $Uri = "$BaseUri/windows/$ToolName/$ToolNameMoniker.zip"
+ $ShimPath = Join-Path $InstallPath "$ToolName.exe"
+
+ if ($Clean) {
+ Write-Host "Cleaning $ToolInstallDirectory"
+ if (Test-Path $ToolInstallDirectory) {
+ Remove-Item $ToolInstallDirectory -Force -Recurse
+ }
+ Write-Host "Cleaning $ShimPath"
+ if (Test-Path $ShimPath) {
+ Remove-Item $ShimPath -Force
+ }
+ $ToolTempPath = CommonLibrary\Get-TempPathFilename -Path $Uri
+ Write-Host "Cleaning $ToolTempPath"
+ if (Test-Path $ToolTempPath) {
+ Remove-Item $ToolTempPath -Force
+ }
+ exit 0
+ }
+
+ # Install tool
+ if ((Test-Path $ToolInstallDirectory) -And (-Not $Force)) {
+ Write-Verbose "$ToolName ($Version) already exists, skipping install"
+ }
+ else {
+ $InstallStatus = CommonLibrary\DownloadAndExtract -Uri $Uri `
+ -InstallDirectory $ToolInstallDirectory `
+ -Force:$Force `
+ -DownloadRetries $DownloadRetries `
+ -RetryWaitTimeInSeconds $RetryWaitTimeInSeconds `
+ -Verbose:$Verbose
+
+ if ($InstallStatus -Eq $False) {
+ Write-Error "Installation failed"
+ exit 1
+ }
+ }
+
+ $ToolFilePath = Get-ChildItem $ToolInstallDirectory -Recurse -Filter "$ToolName.exe" | % { $_.FullName }
+ if (@($ToolFilePath).Length -Gt 1) {
+ Write-Error "There are multiple copies of $ToolName in $($ToolInstallDirectory): `n$(@($ToolFilePath | out-string))"
+ exit 1
+ } elseif (@($ToolFilePath).Length -Lt 1) {
+ Write-Error "$ToolName was not found in $ToolFilePath."
+ exit 1
+ }
+
+ # Generate shim
+ # Always rewrite shims so that we are referencing the expected version
+ $GenerateShimStatus = CommonLibrary\New-ScriptShim -ShimName $ToolName `
+ -ShimDirectory $InstallPath `
+ -ToolFilePath "$ToolFilePath" `
+ -BaseUri $BaseUri `
+ -Force:$Force `
+ -Verbose:$Verbose
+
+ if ($GenerateShimStatus -Eq $False) {
+ Write-Error "Generate shim failed"
+ return 1
+ }
+
+ exit 0
+}
+catch {
+ Write-Host $_
+ Write-Host $_.Exception
+ exit 1
+}
diff --git a/eng/common/performance/perfhelixpublish.proj b/eng/common/performance/perfhelixpublish.proj
new file mode 100644
index 000000000..e5826b532
--- /dev/null
+++ b/eng/common/performance/perfhelixpublish.proj
@@ -0,0 +1,102 @@
+
+
+
+ %HELIX_CORRELATION_PAYLOAD%\performance\scripts\benchmarks_ci.py --csproj %HELIX_CORRELATION_PAYLOAD%\performance\$(TargetCsproj)
+ --dotnet-versions %DOTNET_VERSION% --cli-source-info args --cli-branch %PERFLAB_BRANCH% --cli-commit-sha %PERFLAB_HASH% --cli-repository https://github.com/%PERFLAB_REPO% --cli-source-timestamp %PERFLAB_BUILDTIMESTAMP%
+ py -3
+ %HELIX_CORRELATION_PAYLOAD%\Core_Root\CoreRun.exe
+ %HELIX_CORRELATION_PAYLOAD%\Baseline_Core_Root\CoreRun.exe
+ $(HelixPreCommands);call %HELIX_CORRELATION_PAYLOAD%\performance\tools\machine-setup.cmd
+ %HELIX_CORRELATION_PAYLOAD%\artifacts\BenchmarkDotNet.Artifacts
+ %HELIX_CORRELATION_PAYLOAD%\artifacts\BenchmarkDotNet.Artifacts_Baseline
+ %HELIX_CORRELATION_PAYLOAD%\performance\src\tools\ResultsComparer\ResultsComparer.csproj
+ %HELIX_CORRELATION_PAYLOAD%\performance\tools\dotnet\$(Architecture)\dotnet.exe
+ %25%25
+ %HELIX_WORKITEM_ROOT%\testResults.xml
+
+
+
+ $HELIX_CORRELATION_PAYLOAD
+ $(BaseDirectory)/performance
+
+
+
+ $HELIX_WORKITEM_PAYLOAD
+ $(BaseDirectory)
+
+
+
+ $(PerformanceDirectory)/scripts/benchmarks_ci.py --csproj $(PerformanceDirectory)/$(TargetCsproj)
+ --dotnet-versions $DOTNET_VERSION --cli-source-info args --cli-branch $PERFLAB_BRANCH --cli-commit-sha $PERFLAB_HASH --cli-repository https://github.com/$PERFLAB_REPO --cli-source-timestamp $PERFLAB_BUILDTIMESTAMP
+ python3
+ $(BaseDirectory)/Core_Root/corerun
+ $(BaseDirectory)/Baseline_Core_Root/corerun
+ $(HelixPreCommands);chmod +x $(PerformanceDirectory)/tools/machine-setup.sh;. $(PerformanceDirectory)/tools/machine-setup.sh
+ $(BaseDirectory)/artifacts/BenchmarkDotNet.Artifacts
+ $(BaseDirectory)/artifacts/BenchmarkDotNet.Artifacts_Baseline
+ $(PerformanceDirectory)/src/tools/ResultsComparer/ResultsComparer.csproj
+ $(PerformanceDirectory)/tools/dotnet/$(Architecture)/dotnet
+ %25
+ $HELIX_WORKITEM_ROOT/testResults.xml
+
+
+
+ --corerun $(CoreRun)
+
+
+
+ --corerun $(BaselineCoreRun)
+
+
+
+ $(Python) $(WorkItemCommand) --incremental no --architecture $(Architecture) -f $(_Framework) $(PerfLabArguments)
+
+
+
+ $(WorkItemCommand) $(CliArguments)
+
+
+
+
+ %(Identity)
+
+
+
+
+ 5
+
+
+
+
+
+
+
+
+
+
+ false
+
+
+
+
+
+ $(WorkItemDirectory)
+ $(WorkItemCommand) --bdn-artifacts $(BaselineArtifactsDirectory) --bdn-arguments="--anyCategories $(BDNCategories) $(ExtraBenchmarkDotNetArguments) $(BaselineCoreRunArgument) --partition-count $(PartitionCount) --partition-index %(HelixWorkItem.Index)"
+ $(WorkItemCommand) --bdn-artifacts $(ArtifactsDirectory) --bdn-arguments="--anyCategories $(BDNCategories) $(ExtraBenchmarkDotNetArguments) $(CoreRunArgument) --partition-count $(PartitionCount) --partition-index %(HelixWorkItem.Index)"
+ $(DotnetExe) run -f $(_Framework) -p $(ResultsComparer) --base $(BaselineArtifactsDirectory) --diff $(ArtifactsDirectory) --threshold 2$(Percent) --xml $(XMLResults);$(FinalCommand)
+ 4:00
+
+
+
+
+
+ $(WorkItemDirectory)
+ $(WorkItemCommand) --bdn-artifacts $(BaselineArtifactsDirectory) --bdn-arguments="--anyCategories $(BDNCategories) $(ExtraBenchmarkDotNetArguments) $(BaselineCoreRunArgument)"
+ $(WorkItemCommand) --bdn-artifacts $(ArtifactsDirectory) --bdn-arguments="--anyCategories $(BDNCategories) $(ExtraBenchmarkDotNetArguments) $(CoreRunArgument)"
+ $(DotnetExe) run -f $(_Framework) -p $(ResultsComparer) --base $(BaselineArtifactsDirectory) --diff $(ArtifactsDirectory) --threshold 2$(Percent) --xml $(XMLResults)
+ 4:00
+
+
+
\ No newline at end of file
diff --git a/eng/common/performance/performance-setup.ps1 b/eng/common/performance/performance-setup.ps1
new file mode 100644
index 000000000..ec41965fc
--- /dev/null
+++ b/eng/common/performance/performance-setup.ps1
@@ -0,0 +1,106 @@
+Param(
+ [string] $SourceDirectory=$env:BUILD_SOURCESDIRECTORY,
+ [string] $CoreRootDirectory,
+ [string] $BaselineCoreRootDirectory,
+ [string] $Architecture="x64",
+ [string] $Framework="netcoreapp5.0",
+ [string] $CompilationMode="Tiered",
+ [string] $Repository=$env:BUILD_REPOSITORY_NAME,
+ [string] $Branch=$env:BUILD_SOURCEBRANCH,
+ [string] $CommitSha=$env:BUILD_SOURCEVERSION,
+ [string] $BuildNumber=$env:BUILD_BUILDNUMBER,
+ [string] $RunCategories="coreclr corefx",
+ [string] $Csproj="src\benchmarks\micro\MicroBenchmarks.csproj",
+ [string] $Kind="micro",
+ [switch] $Internal,
+ [switch] $Compare,
+ [string] $Configurations="CompilationMode=$CompilationMode"
+)
+
+$RunFromPerformanceRepo = ($Repository -eq "dotnet/performance") -or ($Repository -eq "dotnet-performance")
+$UseCoreRun = ($CoreRootDirectory -ne [string]::Empty)
+$UseBaselineCoreRun = ($BaselineCoreRootDirectory -ne [string]::Empty)
+
+$PayloadDirectory = (Join-Path $SourceDirectory "Payload")
+$PerformanceDirectory = (Join-Path $PayloadDirectory "performance")
+$WorkItemDirectory = (Join-Path $SourceDirectory "workitem")
+$ExtraBenchmarkDotNetArguments = "--iterationCount 1 --warmupCount 0 --invocationCount 1 --unrollFactor 1 --strategy ColdStart --stopOnFirstError true"
+$Creator = $env:BUILD_DEFINITIONNAME
+$PerfLabArguments = ""
+$HelixSourcePrefix = "pr"
+
+$Queue = "Windows.10.Amd64.ClientRS4.DevEx.15.8.Open"
+
+if ($Framework.StartsWith("netcoreapp")) {
+ $Queue = "Windows.10.Amd64.ClientRS5.Open"
+}
+
+if ($Compare) {
+ $Queue = "Windows.10.Amd64.19H1.Tiger.Perf.Open"
+ $PerfLabArguments = ""
+ $ExtraBenchmarkDotNetArguments = ""
+}
+
+if ($Internal) {
+ $Queue = "Windows.10.Amd64.19H1.Tiger.Perf"
+ $PerfLabArguments = "--upload-to-perflab-container"
+ $ExtraBenchmarkDotNetArguments = ""
+ $Creator = ""
+ $HelixSourcePrefix = "official"
+}
+
+$CommonSetupArguments="--frameworks $Framework --queue $Queue --build-number $BuildNumber --build-configs $Configurations"
+$SetupArguments = "--repository https://github.com/$Repository --branch $Branch --get-perf-hash --commit-sha $CommitSha $CommonSetupArguments"
+
+if ($RunFromPerformanceRepo) {
+ $SetupArguments = "--perf-hash $CommitSha $CommonSetupArguments"
+
+ robocopy $SourceDirectory $PerformanceDirectory /E /XD $PayloadDirectory $SourceDirectory\artifacts $SourceDirectory\.git
+}
+else {
+ git clone --branch master --depth 1 --quiet https://github.com/dotnet/performance $PerformanceDirectory
+}
+
+if ($UseCoreRun) {
+ $NewCoreRoot = (Join-Path $PayloadDirectory "Core_Root")
+ Move-Item -Path $CoreRootDirectory -Destination $NewCoreRoot
+}
+if ($UseBaselineCoreRun) {
+ $NewBaselineCoreRoot = (Join-Path $PayloadDirectory "Baseline_Core_Root")
+ Move-Item -Path $BaselineCoreRootDirectory -Destination $NewBaselineCoreRoot
+}
+
+$DocsDir = (Join-Path $PerformanceDirectory "docs")
+robocopy $DocsDir $WorkItemDirectory
+
+# Set variables that we will need to have in future steps
+$ci = $true
+
+. "$PSScriptRoot\..\pipeline-logging-functions.ps1"
+
+# Directories
+Write-PipelineSetVariable -Name 'PayloadDirectory' -Value "$PayloadDirectory" -IsMultiJobVariable $false
+Write-PipelineSetVariable -Name 'PerformanceDirectory' -Value "$PerformanceDirectory" -IsMultiJobVariable $false
+Write-PipelineSetVariable -Name 'WorkItemDirectory' -Value "$WorkItemDirectory" -IsMultiJobVariable $false
+
+# Script Arguments
+Write-PipelineSetVariable -Name 'Python' -Value "py -3" -IsMultiJobVariable $false
+Write-PipelineSetVariable -Name 'ExtraBenchmarkDotNetArguments' -Value "$ExtraBenchmarkDotNetArguments" -IsMultiJobVariable $false
+Write-PipelineSetVariable -Name 'SetupArguments' -Value "$SetupArguments" -IsMultiJobVariable $false
+Write-PipelineSetVariable -Name 'PerfLabArguments' -Value "$PerfLabArguments" -IsMultiJobVariable $false
+Write-PipelineSetVariable -Name 'BDNCategories' -Value "$RunCategories" -IsMultiJobVariable $false
+Write-PipelineSetVariable -Name 'TargetCsproj' -Value "$Csproj" -IsMultiJobVariable $false
+Write-PipelineSetVariable -Name 'Kind' -Value "$Kind" -IsMultiJobVariable $false
+Write-PipelineSetVariable -Name 'Architecture' -Value "$Architecture" -IsMultiJobVariable $false
+Write-PipelineSetVariable -Name 'UseCoreRun' -Value "$UseCoreRun" -IsMultiJobVariable $false
+Write-PipelineSetVariable -Name 'UseBaselineCoreRun' -Value "$UseBaselineCoreRun" -IsMultiJobVariable $false
+Write-PipelineSetVariable -Name 'RunFromPerfRepo' -Value "$RunFromPerformanceRepo" -IsMultiJobVariable $false
+Write-PipelineSetVariable -Name 'Compare' -Value "$Compare" -IsMultiJobVariable $false
+
+# Helix Arguments
+Write-PipelineSetVariable -Name 'Creator' -Value "$Creator" -IsMultiJobVariable $false
+Write-PipelineSetVariable -Name 'Queue' -Value "$Queue" -IsMultiJobVariable $false
+Write-PipelineSetVariable -Name 'HelixSourcePrefix' -Value "$HelixSourcePrefix" -IsMultiJobVariable $false
+Write-PipelineSetVariable -Name '_BuildConfig' -Value "$Architecture.$Kind.$Framework" -IsMultiJobVariable $false
+
+exit 0
\ No newline at end of file
diff --git a/eng/common/performance/performance-setup.sh b/eng/common/performance/performance-setup.sh
new file mode 100644
index 000000000..2f2092166
--- /dev/null
+++ b/eng/common/performance/performance-setup.sh
@@ -0,0 +1,216 @@
+#!/usr/bin/env bash
+
+source_directory=$BUILD_SOURCESDIRECTORY
+core_root_directory=
+baseline_core_root_directory=
+architecture=x64
+framework=netcoreapp5.0
+compilation_mode=tiered
+repository=$BUILD_REPOSITORY_NAME
+branch=$BUILD_SOURCEBRANCH
+commit_sha=$BUILD_SOURCEVERSION
+build_number=$BUILD_BUILDNUMBER
+internal=false
+compare=false
+kind="micro"
+run_categories="coreclr corefx"
+csproj="src\benchmarks\micro\MicroBenchmarks.csproj"
+configurations=
+run_from_perf_repo=false
+use_core_run=true
+use_baseline_core_run=true
+
+while (($# > 0)); do
+ lowerI="$(echo $1 | awk '{print tolower($0)}')"
+ case $lowerI in
+ --sourcedirectory)
+ source_directory=$2
+ shift 2
+ ;;
+ --corerootdirectory)
+ core_root_directory=$2
+ shift 2
+ ;;
+ --baselinecorerootdirectory)
+ baseline_core_root_directory=$2
+ shift 2
+ ;;
+ --architecture)
+ architecture=$2
+ shift 2
+ ;;
+ --framework)
+ framework=$2
+ shift 2
+ ;;
+ --compilationmode)
+ compilation_mode=$2
+ shift 2
+ ;;
+ --repository)
+ repository=$2
+ shift 2
+ ;;
+ --branch)
+ branch=$2
+ shift 2
+ ;;
+ --commitsha)
+ commit_sha=$2
+ shift 2
+ ;;
+ --buildnumber)
+ build_number=$2
+ shift 2
+ ;;
+ --kind)
+ kind=$2
+ shift 2
+ ;;
+ --runcategories)
+ run_categories=$2
+ shift 2
+ ;;
+ --csproj)
+ csproj=$2
+ shift 2
+ ;;
+ --internal)
+ internal=true
+ shift 1
+ ;;
+ --compare)
+ compare=true
+ shift 1
+ ;;
+ --configurations)
+ configurations=$2
+ shift 2
+ ;;
+ --help)
+ echo "Common settings:"
+ echo " --corerootdirectory Directory where Core_Root exists, if running perf testing with --corerun"
+ echo " --architecture Architecture of the testing being run"
+ echo " --configurations List of key=value pairs that will be passed to perf testing infrastructure."
+      echo "                              ex: --configurations \"CompilationMode=Tiered OptimizationLevel=PGO\""
+ echo " --help Print help and exit"
+ echo ""
+ echo "Advanced settings:"
+ echo " --framework The framework to run, if not running in master"
+      echo "  --compilationmode          The compilation mode if not passing --configurations"
+ echo " --sourcedirectory The directory of the sources. Defaults to env:BUILD_SOURCESDIRECTORY"
+ echo " --repository The name of the repository in the / format. Defaults to env:BUILD_REPOSITORY_NAME"
+ echo " --branch The name of the branch. Defaults to env:BUILD_SOURCEBRANCH"
+ echo " --commitsha The commit sha1 to run against. Defaults to env:BUILD_SOURCEVERSION"
+ echo " --buildnumber The build number currently running. Defaults to env:BUILD_BUILDNUMBER"
+ echo " --csproj The relative path to the benchmark csproj whose tests should be run. Defaults to src\benchmarks\micro\MicroBenchmarks.csproj"
+ echo " --kind Related to csproj. The kind of benchmarks that should be run. Defaults to micro"
+ echo " --runcategories Related to csproj. Categories of benchmarks to run. Defaults to \"coreclr corefx\""
+ echo " --internal If the benchmarks are running as an official job."
+ echo ""
+ exit 0
+ ;;
+ esac
+done
+
+if [ "$repository" == "dotnet/performance" ] || [ "$repository" == "dotnet-performance" ]; then
+ run_from_perf_repo=true
+fi
+
+if [ -z "$configurations" ]; then
+ configurations="CompliationMode=$compilation_mode"
+fi
+
+if [ -z "$core_root_directory" ]; then
+ use_core_run=false
+fi
+
+if [ -z "$baseline_core_root_directory" ]; then
+ use_baseline_core_run=false
+fi
+
+payload_directory=$source_directory/Payload
+performance_directory=$payload_directory/performance
+workitem_directory=$source_directory/workitem
+extra_benchmark_dotnet_arguments="--iterationCount 1 --warmupCount 0 --invocationCount 1 --unrollFactor 1 --strategy ColdStart --stopOnFirstError true"
+perflab_arguments=
+queue=Ubuntu.1804.Amd64.Open
+creator=$BUILD_DEFINITIONNAME
+helix_source_prefix="pr"
+
+if [[ "$compare" == true ]]; then
+ extra_benchmark_dotnet_arguments=
+ perflab_arguments=
+
+ # No open queues for arm64
+ if [[ "$architecture" = "arm64" ]]; then
+ echo "Compare not available for arm64"
+ exit 1
+ fi
+
+ queue=Ubuntu.1804.Amd64.Tiger.Perf.Open
+fi
+
+if [[ "$internal" == true ]]; then
+ perflab_arguments="--upload-to-perflab-container"
+ helix_source_prefix="official"
+ creator=
+ extra_benchmark_dotnet_arguments=
+
+ if [[ "$architecture" = "arm64" ]]; then
+ queue=Ubuntu.1804.Arm64.Perf
+ else
+ queue=Ubuntu.1804.Amd64.Tiger.Perf
+ fi
+fi
+
+common_setup_arguments="--frameworks $framework --queue $queue --build-number $build_number --build-configs $configurations"
+setup_arguments="--repository https://github.com/$repository --branch $branch --get-perf-hash --commit-sha $commit_sha $common_setup_arguments"
+
+if [[ "$run_from_perf_repo" = true ]]; then
+ payload_directory=
+ workitem_directory=$source_directory
+ performance_directory=$workitem_directory
+ setup_arguments="--perf-hash $commit_sha $common_setup_arguments"
+else
+ git clone --branch master --depth 1 --quiet https://github.com/dotnet/performance $performance_directory
+
+ docs_directory=$performance_directory/docs
+ mv $docs_directory $workitem_directory
+fi
+
+if [[ "$use_core_run" = true ]]; then
+ new_core_root=$payload_directory/Core_Root
+ mv $core_root_directory $new_core_root
+fi
+
+if [[ "$use_baseline_core_run" = true ]]; then
+ new_baseline_core_root=$payload_directory/Baseline_Core_Root
+ mv $baseline_core_root_directory $new_baseline_core_root
+fi
+
+ci=true
+
+_script_dir=$(pwd)/eng/common
+. "$_script_dir/pipeline-logging-functions.sh"
+
+# Make sure all of our variables are available for future steps
+Write-PipelineSetVariable -name "UseCoreRun" -value "$use_core_run" -is_multi_job_variable false
+Write-PipelineSetVariable -name "UseBaselineCoreRun" -value "$use_baseline_core_run" -is_multi_job_variable false
+Write-PipelineSetVariable -name "Architecture" -value "$architecture" -is_multi_job_variable false
+Write-PipelineSetVariable -name "PayloadDirectory" -value "$payload_directory" -is_multi_job_variable false
+Write-PipelineSetVariable -name "PerformanceDirectory" -value "$performance_directory" -is_multi_job_variable false
+Write-PipelineSetVariable -name "WorkItemDirectory" -value "$workitem_directory" -is_multi_job_variable false
+Write-PipelineSetVariable -name "Queue" -value "$queue" -is_multi_job_variable false
+Write-PipelineSetVariable -name "SetupArguments" -value "$setup_arguments" -is_multi_job_variable false
+Write-PipelineSetVariable -name "Python" -value "$python3" -is_multi_job_variable false
+Write-PipelineSetVariable -name "PerfLabArguments" -value "$perflab_arguments" -is_multi_job_variable false
+Write-PipelineSetVariable -name "ExtraBenchmarkDotNetArguments" -value "$extra_benchmark_dotnet_arguments" -is_multi_job_variable false
+Write-PipelineSetVariable -name "BDNCategories" -value "$run_categories" -is_multi_job_variable false
+Write-PipelineSetVariable -name "TargetCsproj" -value "$csproj" -is_multi_job_variable false
+Write-PipelineSetVariable -name "RunFromPerfRepo" -value "$run_from_perf_repo" -is_multi_job_variable false
+Write-PipelineSetVariable -name "Creator" -value "$creator" -is_multi_job_variable false
+Write-PipelineSetVariable -name "HelixSourcePrefix" -value "$helix_source_prefix" -is_multi_job_variable false
+Write-PipelineSetVariable -name "Kind" -value "$kind" -is_multi_job_variable false
+Write-PipelineSetVariable -name "_BuildConfig" -value "$architecture.$kind.$framework" -is_multi_job_variable false
+Write-PipelineSetVariable -name "Compare" -value "$compare" -is_multi_job_variable false
diff --git a/eng/common/pipeline-logging-functions.ps1 b/eng/common/pipeline-logging-functions.ps1
new file mode 100644
index 000000000..af5f48aac
--- /dev/null
+++ b/eng/common/pipeline-logging-functions.ps1
@@ -0,0 +1,234 @@
+# Source for this file was taken from https://github.com/microsoft/azure-pipelines-task-lib/blob/11c9439d4af17e6475d9fe058e6b2e03914d17e6/powershell/VstsTaskSdk/LoggingCommandFunctions.ps1 and modified.
+
+# NOTE: You should not be calling these methods directly as they are likely to change. Instead you should be calling the Write-Pipeline* functions defined in tools.ps1
+
+$script:loggingCommandPrefix = '##vso['
+$script:loggingCommandEscapeMappings = @( # TODO: WHAT ABOUT "="? WHAT ABOUT "%"?
+ New-Object psobject -Property @{ Token = ';' ; Replacement = '%3B' }
+ New-Object psobject -Property @{ Token = "`r" ; Replacement = '%0D' }
+ New-Object psobject -Property @{ Token = "`n" ; Replacement = '%0A' }
+ New-Object psobject -Property @{ Token = "]" ; Replacement = '%5D' }
+)
+# TODO: BUG: Escape % ???
+# TODO: Add test to verify don't need to escape "=".
+
+function Write-PipelineTelemetryError {
+ [CmdletBinding()]
+ param(
+ [Parameter(Mandatory = $true)]
+ [string]$Category,
+ [Parameter(Mandatory = $true)]
+ [string]$Message,
+ [Parameter(Mandatory = $false)]
+ [string]$Type = 'error',
+ [string]$ErrCode,
+ [string]$SourcePath,
+ [string]$LineNumber,
+ [string]$ColumnNumber,
+ [switch]$AsOutput)
+
+ $PSBoundParameters.Remove("Category") | Out-Null
+
+ $Message = "(NETCORE_ENGINEERING_TELEMETRY=$Category) $Message"
+ $PSBoundParameters.Remove("Message") | Out-Null
+ $PSBoundParameters.Add("Message", $Message)
+
+ Write-PipelineTaskError @PSBoundParameters
+}
+
+function Write-PipelineTaskError {
+ [CmdletBinding()]
+ param(
+ [Parameter(Mandatory = $true)]
+ [string]$Message,
+ [Parameter(Mandatory = $false)]
+ [string]$Type = 'error',
+ [string]$ErrCode,
+ [string]$SourcePath,
+ [string]$LineNumber,
+ [string]$ColumnNumber,
+ [switch]$AsOutput)
+
+ if(!$ci) {
+ if($Type -eq 'error') {
+ Write-Host $Message -ForegroundColor Red
+ return
+ }
+ elseif ($Type -eq 'warning') {
+ Write-Host $Message -ForegroundColor Yellow
+ return
+ }
+ }
+
+ if(($Type -ne 'error') -and ($Type -ne 'warning')) {
+ Write-Host $Message
+ return
+ }
+ if(-not $PSBoundParameters.ContainsKey('Type')) {
+ $PSBoundParameters.Add('Type', 'error')
+ }
+ Write-LogIssue @PSBoundParameters
+ }
+
+ function Write-PipelineSetVariable {
+ [CmdletBinding()]
+ param(
+ [Parameter(Mandatory = $true)]
+ [string]$Name,
+ [string]$Value,
+ [switch]$Secret,
+ [switch]$AsOutput,
+ [bool]$IsMultiJobVariable=$true)
+
+ if($ci) {
+ Write-LoggingCommand -Area 'task' -Event 'setvariable' -Data $Value -Properties @{
+ 'variable' = $Name
+ 'isSecret' = $Secret
+ 'isOutput' = $IsMultiJobVariable
+ } -AsOutput:$AsOutput
+ }
+ }
+
+ function Write-PipelinePrependPath {
+ [CmdletBinding()]
+ param(
+ [Parameter(Mandatory=$true)]
+ [string]$Path,
+ [switch]$AsOutput)
+ if($ci) {
+ Write-LoggingCommand -Area 'task' -Event 'prependpath' -Data $Path -AsOutput:$AsOutput
+ }
+ }
+
+<########################################
+# Private functions.
+########################################>
+function Format-LoggingCommandData {
+ [CmdletBinding()]
+ param([string]$Value, [switch]$Reverse)
+
+ if (!$Value) {
+ return ''
+ }
+
+ if (!$Reverse) {
+ foreach ($mapping in $script:loggingCommandEscapeMappings) {
+ $Value = $Value.Replace($mapping.Token, $mapping.Replacement)
+ }
+ } else {
+ for ($i = $script:loggingCommandEscapeMappings.Length - 1 ; $i -ge 0 ; $i--) {
+ $mapping = $script:loggingCommandEscapeMappings[$i]
+ $Value = $Value.Replace($mapping.Replacement, $mapping.Token)
+ }
+ }
+
+ return $Value
+}
+
+function Format-LoggingCommand {
+ [CmdletBinding()]
+ param(
+ [Parameter(Mandatory = $true)]
+ [string]$Area,
+ [Parameter(Mandatory = $true)]
+ [string]$Event,
+ [string]$Data,
+ [hashtable]$Properties)
+
+ # Append the preamble.
+ [System.Text.StringBuilder]$sb = New-Object -TypeName System.Text.StringBuilder
+ $null = $sb.Append($script:loggingCommandPrefix).Append($Area).Append('.').Append($Event)
+
+ # Append the properties.
+ if ($Properties) {
+ $first = $true
+ foreach ($key in $Properties.Keys) {
+ [string]$value = Format-LoggingCommandData $Properties[$key]
+ if ($value) {
+ if ($first) {
+ $null = $sb.Append(' ')
+ $first = $false
+ } else {
+ $null = $sb.Append(';')
+ }
+
+ $null = $sb.Append("$key=$value")
+ }
+ }
+ }
+
+ # Append the tail and output the value.
+ $Data = Format-LoggingCommandData $Data
+ $sb.Append(']').Append($Data).ToString()
+}
+
+function Write-LoggingCommand {
+ [CmdletBinding(DefaultParameterSetName = 'Parameters')]
+ param(
+ [Parameter(Mandatory = $true, ParameterSetName = 'Parameters')]
+ [string]$Area,
+ [Parameter(Mandatory = $true, ParameterSetName = 'Parameters')]
+ [string]$Event,
+ [Parameter(ParameterSetName = 'Parameters')]
+ [string]$Data,
+ [Parameter(ParameterSetName = 'Parameters')]
+ [hashtable]$Properties,
+ [Parameter(Mandatory = $true, ParameterSetName = 'Object')]
+ $Command,
+ [switch]$AsOutput)
+
+ if ($PSCmdlet.ParameterSetName -eq 'Object') {
+ Write-LoggingCommand -Area $Command.Area -Event $Command.Event -Data $Command.Data -Properties $Command.Properties -AsOutput:$AsOutput
+ return
+ }
+
+ $command = Format-LoggingCommand -Area $Area -Event $Event -Data $Data -Properties $Properties
+ if ($AsOutput) {
+ $command
+ } else {
+ Write-Host $command
+ }
+}
+
+function Write-LogIssue {
+ [CmdletBinding()]
+ param(
+ [ValidateSet('warning', 'error')]
+ [Parameter(Mandatory = $true)]
+ [string]$Type,
+ [string]$Message,
+ [string]$ErrCode,
+ [string]$SourcePath,
+ [string]$LineNumber,
+ [string]$ColumnNumber,
+ [switch]$AsOutput)
+
+ $command = Format-LoggingCommand -Area 'task' -Event 'logissue' -Data $Message -Properties @{
+ 'type' = $Type
+ 'code' = $ErrCode
+ 'sourcepath' = $SourcePath
+ 'linenumber' = $LineNumber
+ 'columnnumber' = $ColumnNumber
+ }
+ if ($AsOutput) {
+ return $command
+ }
+
+ if ($Type -eq 'error') {
+ $foregroundColor = $host.PrivateData.ErrorForegroundColor
+ $backgroundColor = $host.PrivateData.ErrorBackgroundColor
+ if ($foregroundColor -isnot [System.ConsoleColor] -or $backgroundColor -isnot [System.ConsoleColor]) {
+ $foregroundColor = [System.ConsoleColor]::Red
+ $backgroundColor = [System.ConsoleColor]::Black
+ }
+ } else {
+ $foregroundColor = $host.PrivateData.WarningForegroundColor
+ $backgroundColor = $host.PrivateData.WarningBackgroundColor
+ if ($foregroundColor -isnot [System.ConsoleColor] -or $backgroundColor -isnot [System.ConsoleColor]) {
+ $foregroundColor = [System.ConsoleColor]::Yellow
+ $backgroundColor = [System.ConsoleColor]::Black
+ }
+ }
+
+ Write-Host $command -ForegroundColor $foregroundColor -BackgroundColor $backgroundColor
+}
\ No newline at end of file
diff --git a/eng/common/pipeline-logging-functions.sh b/eng/common/pipeline-logging-functions.sh
new file mode 100644
index 000000000..1c560a506
--- /dev/null
+++ b/eng/common/pipeline-logging-functions.sh
@@ -0,0 +1,172 @@
+#!/usr/bin/env bash
+
+function Write-PipelineTelemetryError {
+ local telemetry_category=''
+ local function_args=()
+ local message=''
+ while [[ $# -gt 0 ]]; do
+ opt="$(echo "${1/#--/-}" | awk '{print tolower($0)}')"
+ case "$opt" in
+ -category|-c)
+ telemetry_category=$2
+ shift
+ ;;
+ -*)
+ function_args+=("$1 $2")
+ shift
+ ;;
+ *)
+ message=$*
+ ;;
+ esac
+ shift
+ done
+
+ if [[ "$ci" != true ]]; then
+ echo "$message" >&2
+ return
+ fi
+
+ message="(NETCORE_ENGINEERING_TELEMETRY=$telemetry_category) $message"
+ function_args+=("$message")
+
+ Write-PipelineTaskError $function_args
+}
+
+function Write-PipelineTaskError {
+ if [[ "$ci" != true ]]; then
+ echo "$@" >&2
+ return
+ fi
+
+ local message_type="error"
+ local sourcepath=''
+ local linenumber=''
+ local columnnumber=''
+ local error_code=''
+
+ while [[ $# -gt 0 ]]; do
+ opt="$(echo "${1/#--/-}" | awk '{print tolower($0)}')"
+ case "$opt" in
+ -type|-t)
+ message_type=$2
+ shift
+ ;;
+ -sourcepath|-s)
+ sourcepath=$2
+ shift
+ ;;
+ -linenumber|-ln)
+ linenumber=$2
+ shift
+ ;;
+ -columnnumber|-cn)
+ columnnumber=$2
+ shift
+ ;;
+ -errcode|-e)
+ error_code=$2
+ shift
+ ;;
+ *)
+ break
+ ;;
+ esac
+
+ shift
+ done
+
+ local message="##vso[task.logissue"
+
+ message="$message type=$message_type"
+
+ if [ -n "$sourcepath" ]; then
+ message="$message;sourcepath=$sourcepath"
+ fi
+
+ if [ -n "$linenumber" ]; then
+ message="$message;linenumber=$linenumber"
+ fi
+
+ if [ -n "$columnnumber" ]; then
+ message="$message;columnnumber=$columnnumber"
+ fi
+
+ if [ -n "$error_code" ]; then
+ message="$message;code=$error_code"
+ fi
+
+ message="$message]$*"
+ echo "$message"
+}
+
+function Write-PipelineSetVariable {
+ if [[ "$ci" != true ]]; then
+ return
+ fi
+
+ local name=''
+ local value=''
+ local secret=false
+ local as_output=false
+ local is_multi_job_variable=true
+
+ while [[ $# -gt 0 ]]; do
+ opt="$(echo "${1/#--/-}" | awk '{print tolower($0)}')"
+ case "$opt" in
+ -name|-n)
+ name=$2
+ shift
+ ;;
+ -value|-v)
+ value=$2
+ shift
+ ;;
+ -secret|-s)
+ secret=true
+ ;;
+ -as_output|-a)
+ as_output=true
+ ;;
+ -is_multi_job_variable|-i)
+ is_multi_job_variable=$2
+ shift
+ ;;
+ esac
+ shift
+ done
+
+ value=${value/;/%3B}
+ value=${value/\\r/%0D}
+ value=${value/\\n/%0A}
+ value=${value/]/%5D}
+
+ local message="##vso[task.setvariable variable=$name;isSecret=$secret;isOutput=$is_multi_job_variable]$value"
+
+ if [[ "$as_output" == true ]]; then
+ $message
+ else
+ echo "$message"
+ fi
+}
+
+function Write-PipelinePrependPath {
+ local prepend_path=''
+
+ while [[ $# -gt 0 ]]; do
+ opt="$(echo "${1/#--/-}" | awk '{print tolower($0)}')"
+ case "$opt" in
+ -path|-p)
+ prepend_path=$2
+ shift
+ ;;
+ esac
+ shift
+ done
+
+ export PATH="$prepend_path:$PATH"
+
+ if [[ "$ci" == true ]]; then
+ echo "##vso[task.prependpath]$prepend_path"
+ fi
+}
\ No newline at end of file
diff --git a/eng/common/post-build/darc-gather-drop.ps1 b/eng/common/post-build/darc-gather-drop.ps1
new file mode 100644
index 000000000..89854d3c1
--- /dev/null
+++ b/eng/common/post-build/darc-gather-drop.ps1
@@ -0,0 +1,45 @@
+param(
+ [Parameter(Mandatory=$true)][int] $BarBuildId, # ID of the build which assets should be downloaded
+ [Parameter(Mandatory=$true)][string] $DropLocation, # Where the assets should be downloaded to
+ [Parameter(Mandatory=$true)][string] $MaestroApiAccessToken, # Token used to access Maestro API
+ [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = "https://maestro-prod.westus2.cloudapp.azure.com", # Maestro API URL
+ [Parameter(Mandatory=$false)][string] $MaestroApiVersion = "2019-01-16" # Version of Maestro API to use
+)
+
+. $PSScriptRoot\post-build-utils.ps1
+
+try {
+ Write-Host "Installing DARC ..."
+
+ . $PSScriptRoot\..\darc-init.ps1
+ $exitCode = $LASTEXITCODE
+
+ if ($exitCode -ne 0) {
+ Write-PipelineTaskError "Something failed while running 'darc-init.ps1'. Check for errors above. Exiting now..."
+ ExitWithExitCode $exitCode
+ }
+
+ # For now, only use a dry run.
+ # Ideally we would change darc to enable a quick request that
+ # would check whether the file exists that you can download it,
+ # and that it won't conflict with other files.
+ # https://github.com/dotnet/arcade/issues/3674
+  # Right now we can't remove continue-on-error because we occasionally will have
+ # dependencies that have no associated builds (e.g. an old dependency).
+ # We need to add an option to baseline specific dependencies away, or add them manually
+ # to the BAR.
+ darc gather-drop --non-shipping `
+ --dry-run `
+ --continue-on-error `
+ --id $BarBuildId `
+ --output-dir $DropLocation `
+ --bar-uri $MaestroApiEndpoint `
+ --password $MaestroApiAccessToken `
+ --latest-location
+}
+catch {
+ Write-Host $_
+ Write-Host $_.Exception
+ Write-Host $_.ScriptStackTrace
+ ExitWithExitCode 1
+}
diff --git a/eng/common/post-build/nuget-validation.ps1 b/eng/common/post-build/nuget-validation.ps1
new file mode 100644
index 000000000..78ed0d540
--- /dev/null
+++ b/eng/common/post-build/nuget-validation.ps1
@@ -0,0 +1,25 @@
+# This script validates NuGet package metadata information using this
+# tool: https://github.com/NuGet/NuGetGallery/tree/jver-verify/src/VerifyMicrosoftPackage
+
+param(
+ [Parameter(Mandatory=$true)][string] $PackagesPath, # Path to where the packages to be validated are
+ [Parameter(Mandatory=$true)][string] $ToolDestinationPath # Where the validation tool should be downloaded to
+)
+
+. $PSScriptRoot\post-build-utils.ps1
+
+try {
+ $url = "https://raw.githubusercontent.com/NuGet/NuGetGallery/jver-verify/src/VerifyMicrosoftPackage/verify.ps1"
+
+ New-Item -ItemType "directory" -Path ${ToolDestinationPath} -Force
+
+ Invoke-WebRequest $url -OutFile ${ToolDestinationPath}\verify.ps1
+
+ & ${ToolDestinationPath}\verify.ps1 ${PackagesPath}\*.nupkg
+}
+catch {
+ Write-PipelineTaskError "NuGet package validation failed. Please check error logs."
+ Write-Host $_
+ Write-Host $_.ScriptStackTrace
+ ExitWithExitCode 1
+}
diff --git a/eng/common/post-build/post-build-utils.ps1 b/eng/common/post-build/post-build-utils.ps1
new file mode 100644
index 000000000..551ae113f
--- /dev/null
+++ b/eng/common/post-build/post-build-utils.ps1
@@ -0,0 +1,90 @@
+# Most of the functions in this file require the variables `MaestroApiEndPoint`,
+# `MaestroApiVersion` and `MaestroApiAccessToken` to be globally available.
+
+$ErrorActionPreference = "Stop"
+Set-StrictMode -Version 2.0
+
+# `tools.ps1` checks $ci to perform some actions. Since the post-build
+# scripts don't necessarily execute in the same agent that run the
+# build.ps1/sh script this variable isn't automatically set.
+$ci = $true
+. $PSScriptRoot\..\tools.ps1
+
+function Create-MaestroApiRequestHeaders([string]$ContentType = "application/json") {
+ Validate-MaestroVars
+
+ $headers = New-Object 'System.Collections.Generic.Dictionary[[String],[String]]'
+ $headers.Add('Accept', $ContentType)
+ $headers.Add('Authorization',"Bearer $MaestroApiAccessToken")
+ return $headers
+}
+
+function Get-MaestroChannel([int]$ChannelId) {
+ Validate-MaestroVars
+
+ $apiHeaders = Create-MaestroApiRequestHeaders
+ $apiEndpoint = "$MaestroApiEndPoint/api/channels/${ChannelId}?api-version=$MaestroApiVersion"
+
+ $result = try { Invoke-WebRequest -Method Get -Uri $apiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" }
+ return $result
+}
+
+function Get-MaestroBuild([int]$BuildId) {
+ Validate-MaestroVars
+
+ $apiHeaders = Create-MaestroApiRequestHeaders -AuthToken $MaestroApiAccessToken
+ $apiEndpoint = "$MaestroApiEndPoint/api/builds/${BuildId}?api-version=$MaestroApiVersion"
+
+ $result = try { return Invoke-WebRequest -Method Get -Uri $apiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" }
+ return $result
+}
+
+function Get-MaestroSubscriptions([string]$SourceRepository, [int]$ChannelId) {
+ Validate-MaestroVars
+
+ $SourceRepository = [System.Web.HttpUtility]::UrlEncode($SourceRepository)
+ $apiHeaders = Create-MaestroApiRequestHeaders -AuthToken $MaestroApiAccessToken
+ $apiEndpoint = "$MaestroApiEndPoint/api/subscriptions?sourceRepository=$SourceRepository&channelId=$ChannelId&api-version=$MaestroApiVersion"
+
+ $result = try { Invoke-WebRequest -Method Get -Uri $apiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" }
+ return $result
+}
+
+function Trigger-Subscription([string]$SubscriptionId) {
+ Validate-MaestroVars
+
+ $apiHeaders = Create-MaestroApiRequestHeaders -AuthToken $MaestroApiAccessToken
+ $apiEndpoint = "$MaestroApiEndPoint/api/subscriptions/$SubscriptionId/trigger?api-version=$MaestroApiVersion"
+ Invoke-WebRequest -Uri $apiEndpoint -Headers $apiHeaders -Method Post | Out-Null
+}
+
+function Assign-BuildToChannel([int]$BuildId, [int]$ChannelId) {
+ Validate-MaestroVars
+
+ $apiHeaders = Create-MaestroApiRequestHeaders -AuthToken $MaestroApiAccessToken
+ $apiEndpoint = "$MaestroApiEndPoint/api/channels/${ChannelId}/builds/${BuildId}?api-version=$MaestroApiVersion"
+ Invoke-WebRequest -Method Post -Uri $apiEndpoint -Headers $apiHeaders | Out-Null
+}
+
+function Validate-MaestroVars {
+ try {
+ Get-Variable MaestroApiEndPoint -Scope Global | Out-Null
+ Get-Variable MaestroApiVersion -Scope Global | Out-Null
+ Get-Variable MaestroApiAccessToken -Scope Global | Out-Null
+
+ if (!($MaestroApiEndPoint -Match "^http[s]?://maestro-(int|prod).westus2.cloudapp.azure.com$")) {
+ Write-PipelineTaskError "MaestroApiEndPoint is not a valid Maestro URL. '$MaestroApiEndPoint'"
+ ExitWithExitCode 1
+ }
+
+ if (!($MaestroApiVersion -Match "^[0-9]{4}-[0-9]{2}-[0-9]{2}$")) {
+ Write-PipelineTaskError "MaestroApiVersion does not match a version string in the format yyyy-MM-DD. '$MaestroApiVersion'"
+ ExitWithExitCode 1
+ }
+ }
+ catch {
+ Write-PipelineTaskError "Error: Variables `MaestroApiEndPoint`, `MaestroApiVersion` and `MaestroApiAccessToken` are required while using this script."
+ Write-Host $_
+ ExitWithExitCode 1
+ }
+}
diff --git a/eng/common/post-build/promote-build.ps1 b/eng/common/post-build/promote-build.ps1
new file mode 100644
index 000000000..e5ae85f25
--- /dev/null
+++ b/eng/common/post-build/promote-build.ps1
@@ -0,0 +1,48 @@
+param(
+ [Parameter(Mandatory=$true)][int] $BuildId,
+ [Parameter(Mandatory=$true)][int] $ChannelId,
+ [Parameter(Mandatory=$true)][string] $MaestroApiAccessToken,
+ [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = "https://maestro-prod.westus2.cloudapp.azure.com",
+ [Parameter(Mandatory=$false)][string] $MaestroApiVersion = "2019-01-16"
+)
+
+. $PSScriptRoot\post-build-utils.ps1
+
+try {
+ # Check that the channel we are going to promote the build to exist
+ $channelInfo = Get-MaestroChannel -ChannelId $ChannelId
+
+ if (!$channelInfo) {
+ Write-Host "Channel with BAR ID $ChannelId was not found in BAR!"
+ ExitWithExitCode 1
+ }
+
+ # Get info about which channels the build has already been promoted to
+ $buildInfo = Get-MaestroBuild -BuildId $BuildId
+
+ if (!$buildInfo) {
+ Write-Host "Build with BAR ID $BuildId was not found in BAR!"
+ ExitWithExitCode 1
+ }
+
+ # Find whether the build is already assigned to the channel or not
+ if ($buildInfo.channels) {
+ foreach ($channel in $buildInfo.channels) {
+ if ($channel.Id -eq $ChannelId) {
+ Write-Host "The build with BAR ID $BuildId is already on channel $ChannelId!"
+ ExitWithExitCode 0
+ }
+ }
+ }
+
+ Write-Host "Promoting build '$BuildId' to channel '$ChannelId'."
+
+ Assign-BuildToChannel -BuildId $BuildId -ChannelId $ChannelId
+
+ Write-Host "done."
+}
+catch {
+ Write-Host "There was an error while trying to promote build '$BuildId' to channel '$ChannelId'"
+ Write-Host $_
+ Write-Host $_.ScriptStackTrace
+}
diff --git a/eng/common/post-build/setup-maestro-vars.ps1 b/eng/common/post-build/setup-maestro-vars.ps1
new file mode 100644
index 000000000..d7f64dc63
--- /dev/null
+++ b/eng/common/post-build/setup-maestro-vars.ps1
@@ -0,0 +1,26 @@
+param(
+ [Parameter(Mandatory=$true)][string] $ReleaseConfigsPath # Full path to ReleaseConfigs.txt asset
+)
+
+. $PSScriptRoot\post-build-utils.ps1
+
+try {
+ $Content = Get-Content $ReleaseConfigsPath
+
+ $BarId = $Content | Select -Index 0
+
+ $Channels = ""
+ $Content | Select -Index 1 | ForEach-Object { $Channels += "$_ ," }
+
+ $IsStableBuild = $Content | Select -Index 2
+
+ Write-PipelineSetVariable -Name 'BARBuildId' -Value $BarId
+ Write-PipelineSetVariable -Name 'InitialChannels' -Value "$Channels"
+ Write-PipelineSetVariable -Name 'IsStableBuild' -Value $IsStableBuild
+}
+catch {
+ Write-Host $_
+ Write-Host $_.Exception
+ Write-Host $_.ScriptStackTrace
+ ExitWithExitCode 1
+}
diff --git a/eng/common/post-build/sourcelink-validation.ps1 b/eng/common/post-build/sourcelink-validation.ps1
new file mode 100644
index 000000000..bbfdacca1
--- /dev/null
+++ b/eng/common/post-build/sourcelink-validation.ps1
@@ -0,0 +1,257 @@
+param(
+ [Parameter(Mandatory=$true)][string] $InputPath, # Full path to directory where Symbols.NuGet packages to be checked are stored
+ [Parameter(Mandatory=$true)][string] $ExtractPath, # Full path to directory where the packages will be extracted during validation
+ [Parameter(Mandatory=$false)][string] $GHRepoName, # GitHub name of the repo including the Org. E.g., dotnet/arcade
+ [Parameter(Mandatory=$false)][string] $GHCommit, # GitHub commit SHA used to build the packages
+ [Parameter(Mandatory=$true)][string] $SourcelinkCliVersion # Version of SourceLink CLI to use
+)
+
+. $PSScriptRoot\post-build-utils.ps1
+
+# Cache/HashMap (File -> Exist flag) used to consult whether a file exists
+# in the repository at a specific commit point. This is populated by inserting
+# all files present in the repo at a specific commit point.
+$global:RepoFiles = @{}
+
+# Maximum number of jobs to run in parallel
+$MaxParallelJobs = 6
+
+# Wait time between check for system load
+$SecondsBetweenLoadChecks = 10
+
+$ValidatePackage = {
+ param(
+ [string] $PackagePath # Full path to a Symbols.NuGet package
+ )
+
+ . $using:PSScriptRoot\..\tools.ps1
+
+  # Ensure input file exists
+ if (!(Test-Path $PackagePath)) {
+ Write-Host "Input file does not exist: $PackagePath"
+ return 1
+ }
+
+ # Extensions for which we'll look for SourceLink information
+ # For now we'll only care about Portable & Embedded PDBs
+ $RelevantExtensions = @(".dll", ".exe", ".pdb")
+
+ Write-Host -NoNewLine "Validating" ([System.IO.Path]::GetFileName($PackagePath)) "... "
+
+ $PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath)
+ $ExtractPath = Join-Path -Path $using:ExtractPath -ChildPath $PackageId
+ $FailedFiles = 0
+
+ Add-Type -AssemblyName System.IO.Compression.FileSystem
+
+ [System.IO.Directory]::CreateDirectory($ExtractPath) | Out-Null
+
+ try {
+ $zip = [System.IO.Compression.ZipFile]::OpenRead($PackagePath)
+
+ $zip.Entries |
+ Where-Object {$RelevantExtensions -contains [System.IO.Path]::GetExtension($_.Name)} |
+ ForEach-Object {
+ $FileName = $_.FullName
+ $Extension = [System.IO.Path]::GetExtension($_.Name)
+ $FakeName = -Join((New-Guid), $Extension)
+ $TargetFile = Join-Path -Path $ExtractPath -ChildPath $FakeName
+
+ # We ignore resource DLLs
+ if ($FileName.EndsWith(".resources.dll")) {
+ return
+ }
+
+ [System.IO.Compression.ZipFileExtensions]::ExtractToFile($_, $TargetFile, $true)
+
+ $ValidateFile = {
+ param(
+ [string] $FullPath, # Full path to the module that has to be checked
+ [string] $RealPath,
+ [ref] $FailedFiles
+ )
+
+ $sourcelinkExe = "$env:USERPROFILE\.dotnet\tools"
+ $sourcelinkExe = Resolve-Path "$sourcelinkExe\sourcelink.exe"
+ $SourceLinkInfos = & $sourcelinkExe print-urls $FullPath | Out-String
+
+ if ($LASTEXITCODE -eq 0 -and -not ([string]::IsNullOrEmpty($SourceLinkInfos))) {
+ $NumFailedLinks = 0
+
+ # We only care about Http addresses
+ $Matches = (Select-String '(http[s]?)(:\/\/)([^\s,]+)' -Input $SourceLinkInfos -AllMatches).Matches
+
+ if ($Matches.Count -ne 0) {
+ $Matches.Value |
+ ForEach-Object {
+ $Link = $_
+ $CommitUrl = "https://raw.githubusercontent.com/${using:GHRepoName}/${using:GHCommit}/"
+
+ $FilePath = $Link.Replace($CommitUrl, "")
+ $Status = 200
+ $Cache = $using:RepoFiles
+
+ if ( !($Cache.ContainsKey($FilePath)) ) {
+ try {
+ $Uri = $Link -as [System.URI]
+
+ # Only GitHub links are valid
+ if ($Uri.AbsoluteURI -ne $null -and ($Uri.Host -match "github" -or $Uri.Host -match "githubusercontent")) {
+ $Status = (Invoke-WebRequest -Uri $Link -UseBasicParsing -Method HEAD -TimeoutSec 5).StatusCode
+ }
+ else {
+ $Status = 0
+ }
+ }
+ catch {
+ write-host $_
+ $Status = 0
+ }
+ }
+
+ if ($Status -ne 200) {
+ if ($NumFailedLinks -eq 0) {
+ if ($FailedFiles.Value -eq 0) {
+ Write-Host
+ }
+
+ Write-Host "`tFile $RealPath has broken links:"
+ }
+
+ Write-Host "`t`tFailed to retrieve $Link"
+
+ $NumFailedLinks++
+ }
+ }
+ }
+
+ if ($NumFailedLinks -ne 0) {
+ $FailedFiles.value++
+ $global:LASTEXITCODE = 1
+ }
+ }
+ }
+
+ &$ValidateFile $TargetFile $FileName ([ref]$FailedFiles)
+ }
+ }
+ catch {
+
+ }
+ finally {
+ $zip.Dispose()
+ }
+
+ if ($FailedFiles -eq 0) {
+ Write-Host "Passed."
+ return 0
+ }
+ else {
+ Write-Host "$PackagePath has broken SourceLink links."
+ return 1
+ }
+}
+
+function ValidateSourceLinkLinks {
+ if ($GHRepoName -ne "" -and !($GHRepoName -Match "^[^\s\/]+/[^\s\/]+$")) {
+ if (!($GHRepoName -Match "^[^\s-]+-[^\s]+$")) {
+ Write-PipelineTaskError "GHRepoName should be in the format / or -. '$GHRepoName'"
+ ExitWithExitCode 1
+ }
+ else {
+ $GHRepoName = $GHRepoName -replace '^([^\s-]+)-([^\s]+)$', '$1/$2';
+ }
+ }
+
+ if ($GHCommit -ne "" -and !($GHCommit -Match "^[0-9a-fA-F]{40}$")) {
+ Write-PipelineTaskError "GHCommit should be a 40 chars hexadecimal string. '$GHCommit'"
+ ExitWithExitCode 1
+ }
+
+ if ($GHRepoName -ne "" -and $GHCommit -ne "") {
+ $RepoTreeURL = -Join("http://api.github.com/repos/", $GHRepoName, "/git/trees/", $GHCommit, "?recursive=1")
+ $CodeExtensions = @(".cs", ".vb", ".fs", ".fsi", ".fsx", ".fsscript")
+
+ try {
+ # Retrieve the list of files in the repo at that particular commit point and store them in the RepoFiles hash
+ $Data = Invoke-WebRequest $RepoTreeURL -UseBasicParsing | ConvertFrom-Json | Select-Object -ExpandProperty tree
+
+ foreach ($file in $Data) {
+ $Extension = [System.IO.Path]::GetExtension($file.path)
+
+ if ($CodeExtensions.Contains($Extension)) {
+ $RepoFiles[$file.path] = 1
+ }
+ }
+ }
+ catch {
+ Write-Host "Problems downloading the list of files from the repo. Url used: $RepoTreeURL . Execution will proceed without caching."
+ }
+ }
+ elseif ($GHRepoName -ne "" -or $GHCommit -ne "") {
+ Write-Host "For using the http caching mechanism both GHRepoName and GHCommit should be informed."
+ }
+
+ if (Test-Path $ExtractPath) {
+ Remove-Item $ExtractPath -Force -Recurse -ErrorAction SilentlyContinue
+ }
+
+ # Process each NuGet package in parallel
+ Get-ChildItem "$InputPath\*.symbols.nupkg" |
+ ForEach-Object {
+ Start-Job -ScriptBlock $ValidatePackage -ArgumentList $_.FullName | Out-Null
+ $NumJobs = @(Get-Job -State 'Running').Count
+
+ while ($NumJobs -ge $MaxParallelJobs) {
+ Write-Host "There are $NumJobs validation jobs running right now. Waiting $SecondsBetweenLoadChecks seconds to check again."
+ sleep $SecondsBetweenLoadChecks
+ $NumJobs = @(Get-Job -State 'Running').Count
+ }
+
+ foreach ($Job in @(Get-Job -State 'Completed')) {
+ Receive-Job -Id $Job.Id
+ Remove-Job -Id $Job.Id
+ }
+ }
+
+ $ValidationFailures = 0
+ foreach ($Job in @(Get-Job)) {
+ $jobResult = Wait-Job -Id $Job.Id | Receive-Job
+ if ($jobResult -ne "0") {
+ $ValidationFailures++
+ }
+ }
+ if ($ValidationFailures -gt 0) {
+ Write-PipelineTaskError " $ValidationFailures package(s) failed validation."
+ ExitWithExitCode 1
+ }
+}
+
+function InstallSourcelinkCli {
+ $sourcelinkCliPackageName = "sourcelink"
+
+ $dotnetRoot = InitializeDotNetCli -install:$true
+ $dotnet = "$dotnetRoot\dotnet.exe"
+ $toolList = & "$dotnet" tool list --global
+
+ if (($toolList -like "*$sourcelinkCliPackageName*") -and ($toolList -like "*$sourcelinkCliVersion*")) {
+ Write-Host "SourceLink CLI version $sourcelinkCliVersion is already installed."
+ }
+ else {
+ Write-Host "Installing SourceLink CLI version $sourcelinkCliVersion..."
+ Write-Host "You may need to restart your command window if this is the first dotnet tool you have installed."
+ & "$dotnet" tool install $sourcelinkCliPackageName --version $sourcelinkCliVersion --verbosity "minimal" --global
+ }
+}
+
+try {
+ InstallSourcelinkCli
+
+ ValidateSourceLinkLinks
+}
+catch {
+ Write-Host $_
+ Write-Host $_.Exception
+ Write-Host $_.ScriptStackTrace
+ ExitWithExitCode 1
+}
diff --git a/eng/common/post-build/symbols-validation.ps1 b/eng/common/post-build/symbols-validation.ps1
new file mode 100644
index 000000000..096ac321d
--- /dev/null
+++ b/eng/common/post-build/symbols-validation.ps1
@@ -0,0 +1,189 @@
+param(
+ [Parameter(Mandatory=$true)][string] $InputPath, # Full path to directory where NuGet packages to be checked are stored
+ [Parameter(Mandatory=$true)][string] $ExtractPath, # Full path to directory where the packages will be extracted during validation
+ [Parameter(Mandatory=$true)][string] $DotnetSymbolVersion # Version of dotnet symbol to use
+)
+
+. $PSScriptRoot\post-build-utils.ps1
+
+Add-Type -AssemblyName System.IO.Compression.FileSystem
+
+# Runs `dotnet symbol` against a single module and reports which kind of
+# symbol artifact (if any) the given server produced for it.
+# Returns one of "PDB", "NGen PDB", "DBG for SO", "Dwarf for Dylib",
+# "Module", or $null when nothing matching was downloaded into $SymbolsPath.
+function FirstMatchingSymbolDescriptionOrDefault {
+  param( 
+    [string] $FullPath,                  # Full path to the module that has to be checked
+    [string] $TargetServerParam,         # Parameter to pass to `Symbol Tool` indicating the server to lookup for symbols
+    [string] $SymbolsPath
+  )
+
+  $FileName = [System.IO.Path]::GetFileName($FullPath)
+  $Extension = [System.IO.Path]::GetExtension($FullPath)
+
+  # Those below are potential symbol files that the `dotnet symbol` might
+  # return. Which one will be returned depend on the type of file we are
+  # checking and which type of file was uploaded.
+
+  # The file itself is returned
+  $SymbolPath = $SymbolsPath + "\" + $FileName
+
+  # PDB file for the module
+  $PdbPath = $SymbolPath.Replace($Extension, ".pdb")
+
+  # PDB file for R2R module (created by crossgen)
+  $NGenPdb = $SymbolPath.Replace($Extension, ".ni.pdb")
+
+  # DBG file for a .so library
+  $SODbg = $SymbolPath.Replace($Extension, ".so.dbg")
+
+  # DWARF file for a .dylib
+  $DylibDwarf = $SymbolPath.Replace($Extension, ".dylib.dwarf")
+
+  # dotnet-symbol is installed as a global tool, so its shim lives under the
+  # user profile's .dotnet\tools directory (see InstallDotnetSymbol below).
+  $dotnetSymbolExe = "$env:USERPROFILE\.dotnet\tools"
+  $dotnetSymbolExe = Resolve-Path "$dotnetSymbolExe\dotnet-symbol.exe"
+
+  # Download every symbol flavor for the module into $SymbolsPath; the tool's
+  # own output is discarded — only the files it drops on disk matter here.
+  & $dotnetSymbolExe --symbols --modules --windows-pdbs $TargetServerParam $FullPath -o $SymbolsPath | Out-Null
+
+  # Probe from most specific to least specific artifact kind.
+  if (Test-Path $PdbPath) {
+    return "PDB"
+  }
+  elseif (Test-Path $NGenPdb) {
+    return "NGen PDB"
+  }
+  elseif (Test-Path $SODbg) {
+    return "DBG for SO"
+  }
+  elseif (Test-Path $DylibDwarf) {
+    return "Dwarf for Dylib"
+  }
+  elseif (Test-Path $SymbolPath) {
+    return "Module"
+  }
+  else {
+    return $null
+  }
+}
+
+# Extracts a NuGet package and checks every relevant binary in it against
+# both symbol servers (MSDL and SymWeb). Returns the number of modules for
+# which at least one server could not supply symbols.
+function CountMissingSymbols {
+  param( 
+    [string] $PackagePath          # Path to a NuGet package
+  )
+
+  # Ensure input file exist
+  if (!(Test-Path $PackagePath)) {
+    Write-PipelineTaskError "Input file does not exist: $PackagePath"
+    ExitWithExitCode 1
+  }
+
+  # Extensions for which we'll look for symbols
+  $RelevantExtensions = @(".dll", ".exe", ".so", ".dylib")
+
+  # How many files are missing symbol information
+  $MissingSymbols = 0
+
+  $PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath)
+  # Extract into a GUID-named subdirectory so repeated or same-named
+  # packages never collide inside $ExtractPath.
+  $PackageGuid = New-Guid
+  $ExtractPath = Join-Path -Path $ExtractPath -ChildPath $PackageGuid
+  $SymbolsPath = Join-Path -Path $ExtractPath -ChildPath "Symbols"
+
+  [System.IO.Compression.ZipFile]::ExtractToDirectory($PackagePath, $ExtractPath)
+
+  Get-ChildItem -Recurse $ExtractPath |
+    Where-Object {$RelevantExtensions -contains $_.Extension} |
+    ForEach-Object {
+      # Reference assemblies carry no symbol information by design.
+      if ($_.FullName -Match "\\ref\\") {
+        Write-Host "`t Ignoring reference assembly file" $_.FullName
+        return
+      }
+
+      $SymbolsOnMSDL = FirstMatchingSymbolDescriptionOrDefault $_.FullName "--microsoft-symbol-server" $SymbolsPath
+      $SymbolsOnSymWeb = FirstMatchingSymbolDescriptionOrDefault $_.FullName "--internal-server" $SymbolsPath
+
+      Write-Host -NoNewLine "`t Checking file" $_.FullName "... "
+
+      if ($SymbolsOnMSDL -ne $null -and $SymbolsOnSymWeb -ne $null) {
+        Write-Host "Symbols found on MSDL (" $SymbolsOnMSDL ") and SymWeb (" $SymbolsOnSymWeb ")"
+      }
+      else {
+        $MissingSymbols++
+
+        if ($SymbolsOnMSDL -eq $null -and $SymbolsOnSymWeb -eq $null) {
+          Write-Host "No symbols found on MSDL or SymWeb!"
+        }
+        elseif ($SymbolsOnMSDL -eq $null) {
+          Write-Host "No symbols found on MSDL!"
+        }
+        else {
+          Write-Host "No symbols found on SymWeb!"
+        }
+      }
+    }
+
+  # BUGFIX: removed a stray `Pop-Location` — nothing in this function (or
+  # script) ever calls Push-Location, so popping an empty location stack
+  # raised an error on every package.
+  return $MissingSymbols
+}
+
+# Validates every .nupkg under $InputPath, failing the build as soon as a
+# package is found whose modules are missing symbols on either server.
+function CheckSymbolsAvailable {
+  # Start from a clean extraction directory.
+  if (Test-Path $ExtractPath) {
+    Remove-Item $ExtractPath -Force -Recurse -ErrorAction SilentlyContinue
+  }
+
+  Get-ChildItem "$InputPath\*.nupkg" |
+    ForEach-Object {
+      $FileName = $_.Name
+
+      # These packages from Arcade-Services include some native libraries that
+      # our current symbol uploader can't handle. Below is a workaround until
+      # we get issue: https://github.com/dotnet/arcade/issues/2457 sorted.
+      if ($FileName -Match "Microsoft\.DotNet\.Darc\." -or $FileName -Match "Microsoft\.DotNet\.Maestro\.Tasks\.") {
+        Write-Host "Ignoring Arcade-services file: $FileName"
+        Write-Host
+        return
+      }
+
+      Write-Host "Validating $FileName "
+      $Status = CountMissingSymbols "$InputPath\$FileName"
+
+      if ($Status -ne 0) {
+        Write-PipelineTaskError "Missing symbols for $Status modules in the package $FileName"
+        # BUGFIX: the original passed the never-defined variable $exitCode
+        # (which expands to $null) to ExitWithExitCode; exit with 1 instead.
+        ExitWithExitCode 1
+      }
+
+      Write-Host
+    }
+}
+
+# Installs dotnet-symbol as a global dotnet tool, unless the requested
+# version ($DotnetSymbolVersion, from the script's param block) is already
+# present in the global tool list.
+function InstallDotnetSymbol {
+  $packageName = "dotnet-symbol"
+
+  $dotnetRoot = InitializeDotNetCli -install:$true
+  $dotnet = "$dotnetRoot\dotnet.exe"
+  $toolList = & "$dotnet" tool list --global
+
+  # `dotnet tool list` output contains both the package name and its version.
+  $alreadyInstalled = ($toolList -like "*$packageName*") -and ($toolList -like "*$dotnetSymbolVersion*")
+
+  if ($alreadyInstalled) {
+    Write-Host "dotnet-symbol version $dotnetSymbolVersion is already installed."
+    return
+  }
+
+  Write-Host "Installing dotnet-symbol version $dotnetSymbolVersion..."
+  Write-Host "You may need to restart your command window if this is the first dotnet tool you have installed."
+  & "$dotnet" tool install $packageName --version $dotnetSymbolVersion --verbosity "minimal" --global
+}
+
+# Entry point: ensure dotnet-symbol is available, then check symbol
+# availability for every package under $InputPath.
+try {
+  InstallDotnetSymbol
+
+  CheckSymbolsAvailable
+}
+catch {
+  # Dump the error record, the underlying exception and the script stack
+  # trace before exiting with a failure code.
+  Write-Host $_
+  Write-Host $_.Exception
+  Write-Host $_.ScriptStackTrace
+  ExitWithExitCode 1
+}
diff --git a/eng/common/post-build/trigger-subscriptions.ps1 b/eng/common/post-build/trigger-subscriptions.ps1
new file mode 100644
index 000000000..926d5b455
--- /dev/null
+++ b/eng/common/post-build/trigger-subscriptions.ps1
@@ -0,0 +1,57 @@
+param(
+ [Parameter(Mandatory=$true)][string] $SourceRepo,
+ [Parameter(Mandatory=$true)][int] $ChannelId,
+ [Parameter(Mandatory=$true)][string] $MaestroApiAccessToken,
+ [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = "https://maestro-prod.westus2.cloudapp.azure.com",
+ [Parameter(Mandatory=$false)][string] $MaestroApiVersion = "2019-01-16"
+)
+
+. $PSScriptRoot\post-build-utils.ps1
+
+# Get all the $SourceRepo subscriptions
+$normalizedSourceRepo = $SourceRepo.Replace('dnceng@', '')
+$subscriptions = Get-MaestroSubscriptions -SourceRepository $normalizedSourceRepo -ChannelId $ChannelId
+
+if (!$subscriptions) {
+  Write-Host "No subscriptions found for source repo '$normalizedSourceRepo' in channel '$ChannelId'"
+  ExitWithExitCode 0
+}
+
+$subscriptionsToTrigger = New-Object System.Collections.Generic.List[string]
+$failedTriggeredSubscription = $false
+
+# Get all enabled subscriptions that need dependency flow on 'everyBuild'
+foreach ($subscription in $subscriptions) {
+  if ($subscription.enabled -and $subscription.policy.updateFrequency -like 'everyBuild' -and $subscription.channel.id -eq $ChannelId) {
+    # BUGFIX: "$subscription.id" interpolates only $subscription and then
+    # appends the literal text ".id"; the $() subexpression is required to
+    # print the actual subscription id.
+    Write-Host "Should trigger this subscription: $($subscription.id)"
+    [void]$subscriptionsToTrigger.Add($subscription.id)
+  }
+}
+
+# Trigger each selected subscription; a failure on one does not stop the
+# others, but is remembered so the script can exit non-zero at the end.
+foreach ($subscriptionToTrigger in $subscriptionsToTrigger) {
+  try {
+    Write-Host "Triggering subscription '$subscriptionToTrigger'."
+
+    Trigger-Subscription -SubscriptionId $subscriptionToTrigger
+
+    Write-Host "done."
+  }
+  catch
+  {
+    Write-Host "There was an error while triggering subscription '$subscriptionToTrigger'"
+    Write-Host $_
+    Write-Host $_.ScriptStackTrace
+    $failedTriggeredSubscription = $true
+  }
+}
+
+# Summarize: nothing matched, at least one trigger failed, or all succeeded.
+if ($subscriptionsToTrigger.Count -eq 0) {
+  Write-Host "No subscription matched source repo '$normalizedSourceRepo' and channel ID '$ChannelId'."
+}
+elseif ($failedTriggeredSubscription) {
+  Write-Host "At least one subscription failed to be triggered..."
+  ExitWithExitCode 1
+}
+else {
+  Write-Host "All subscriptions were triggered successfully!"
+}
diff --git a/eng/common/sdk-task.ps1 b/eng/common/sdk-task.ps1
new file mode 100644
index 000000000..d0eec5163
--- /dev/null
+++ b/eng/common/sdk-task.ps1
@@ -0,0 +1,79 @@
+[CmdletBinding(PositionalBinding=$false)]
+Param(
+ [string] $configuration = "Debug",
+ [string] $task,
+ [string] $verbosity = "minimal",
+ [string] $msbuildEngine = $null,
+ [switch] $restore,
+ [switch] $prepareMachine,
+ [switch] $help,
+ [Parameter(ValueFromRemainingArguments=$true)][String[]]$properties
+)
+
+$ci = $true
+$binaryLog = $true
+$warnAsError = $true
+
+. $PSScriptRoot\tools.ps1
+
+# Prints command-line help for sdk-task.ps1.
+function Print-Usage() {
+  Write-Host "Common settings:"
+  Write-Host "  -task <value>           Name of Arcade task (name of a project in SdkTasks directory of the Arcade SDK package)"
+  Write-Host "  -restore                Restore dependencies"
+  Write-Host "  -verbosity <value>      Msbuild verbosity: q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic]"
+  Write-Host "  -help                   Print help and exit"
+  Write-Host ""
+
+  Write-Host "Advanced settings:"
+  Write-Host "  -prepareMachine         Prepare machine for CI run"
+  Write-Host "  -msbuildEngine <value>  Msbuild engine to use to run build ('dotnet', 'vs', or unspecified)."
+  Write-Host ""
+  Write-Host "Command line arguments not listed above are passed thru to msbuild."
+}
+
+# Invokes the SDK task project ($taskProject) with the given MSBuild target,
+# writing a binlog named after the task (suffixed with the target for
+# non-default targets) into $LogDir.
+function Build([string]$target) {
+  $logSuffix = if ($target -eq "Execute") { "" } else { ".$target" }
+  $log = Join-Path $LogDir "$task$logSuffix.binlog"
+  # NOTE(review): PowerShell double-quoted strings do not treat '\' as an
+  # escape, so "$task\\" yields a trailing *double* backslash. Harmless on
+  # Windows paths, but presumably a single '\' was intended — confirm.
+  $outputPath = Join-Path $ToolsetDir "$task\\"
+
+  MSBuild $taskProject `
+    /bl:$log `
+    /t:$target `
+    /p:Configuration=$configuration `
+    /p:RepoRoot=$RepoRoot `
+    /p:BaseIntermediateOutputPath=$outputPath `
+    @properties
+}
+
+# Entry point: validate arguments, resolve the task project, optionally
+# restore, then run the task's Execute target.
+try {
+  if ($help -or (($null -ne $properties) -and ($properties.Contains("/help") -or $properties.Contains("/?")))) {
+    Print-Usage
+    exit 0
+  }
+
+  # -task is required: it names the SdkTasks project to run.
+  if ($task -eq "") {
+    Write-Host "Missing required parameter '-task <value>'" -ForegroundColor Red
+    Print-Usage
+    ExitWithExitCode 1
+  }
+
+  $taskProject = GetSdkTaskProject $task
+  if (!(Test-Path $taskProject)) {
+    Write-Host "Unknown task: $task" -ForegroundColor Red
+    ExitWithExitCode 1
+  }
+
+  if ($restore) {
+    Build "Restore"
+  }
+
+  Build "Execute"
+}
+catch {
+  # Dump the error record, exception and stack trace before failing.
+  Write-Host $_
+  Write-Host $_.Exception
+  Write-Host $_.ScriptStackTrace
+  ExitWithExitCode 1
+}
+
+ExitWithExitCode 0
diff --git a/eng/common/sdl/NuGet.config b/eng/common/sdl/NuGet.config
new file mode 100644
index 000000000..0c5451c11
--- /dev/null
+++ b/eng/common/sdl/NuGet.config
@@ -0,0 +1,13 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/eng/common/sdl/execute-all-sdl-tools.ps1 b/eng/common/sdl/execute-all-sdl-tools.ps1
new file mode 100644
index 000000000..01799d63f
--- /dev/null
+++ b/eng/common/sdl/execute-all-sdl-tools.ps1
@@ -0,0 +1,100 @@
+Param(
+ [string] $GuardianPackageName, # Required: the name of guardian CLI package (not needed if GuardianCliLocation is specified)
+ [string] $NugetPackageDirectory, # Required: directory where NuGet packages are installed (not needed if GuardianCliLocation is specified)
+ [string] $GuardianCliLocation, # Optional: Direct location of Guardian CLI executable if GuardianPackageName & NugetPackageDirectory are not specified
+ [string] $Repository=$env:BUILD_REPOSITORY_NAME, # Required: the name of the repository (e.g. dotnet/arcade)
+ [string] $BranchName=$env:BUILD_SOURCEBRANCH, # Optional: name of branch or version of gdn settings; defaults to master
+ [string] $SourceDirectory=$env:BUILD_SOURCESDIRECTORY, # Required: the directory where source files are located
+ [string] $ArtifactsDirectory = (Join-Path $env:BUILD_SOURCESDIRECTORY ("artifacts")), # Required: the directory where build artifacts are located
+ [string] $AzureDevOpsAccessToken, # Required: access token for dnceng; should be provided via KeyVault
+ [string[]] $SourceToolsList, # Optional: list of SDL tools to run on source code
+ [string[]] $ArtifactToolsList, # Optional: list of SDL tools to run on built artifacts
+ [bool] $TsaPublish=$False, # Optional: true will publish results to TSA; only set to true after onboarding to TSA; TSA is the automated framework used to upload test results as bugs.
+ [string] $TsaBranchName=$env:BUILD_SOURCEBRANCH, # Optional: required for TSA publish; defaults to $(Build.SourceBranchName); TSA is the automated framework used to upload test results as bugs.
+ [string] $TsaRepositoryName=$env:BUILD_REPOSITORY_NAME, # Optional: TSA repository name; will be generated automatically if not submitted; TSA is the automated framework used to upload test results as bugs.
+ [string] $BuildNumber=$env:BUILD_BUILDNUMBER, # Optional: required for TSA publish; defaults to $(Build.BuildNumber)
+ [bool] $UpdateBaseline=$False, # Optional: if true, will update the baseline in the repository; should only be run after fixing any issues which need to be fixed
+ [bool] $TsaOnboard=$False, # Optional: if true, will onboard the repository to TSA; should only be run once; TSA is the automated framework used to upload test results as bugs.
+ [string] $TsaInstanceUrl, # Optional: only needed if TsaOnboard or TsaPublish is true; the instance-url registered with TSA; TSA is the automated framework used to upload test results as bugs.
+ [string] $TsaCodebaseName, # Optional: only needed if TsaOnboard or TsaPublish is true; the name of the codebase registered with TSA; TSA is the automated framework used to upload test results as bugs.
+ [string] $TsaProjectName, # Optional: only needed if TsaOnboard or TsaPublish is true; the name of the project registered with TSA; TSA is the automated framework used to upload test results as bugs.
+ [string] $TsaNotificationEmail, # Optional: only needed if TsaOnboard is true; the email(s) which will receive notifications of TSA bug filings (e.g. alias@microsoft.com); TSA is the automated framework used to upload test results as bugs.
+ [string] $TsaCodebaseAdmin, # Optional: only needed if TsaOnboard is true; the aliases which are admins of the TSA codebase (e.g. DOMAIN\alias); TSA is the automated framework used to upload test results as bugs.
+ [string] $TsaBugAreaPath, # Optional: only needed if TsaOnboard is true; the area path where TSA will file bugs in AzDO; TSA is the automated framework used to upload test results as bugs.
+ [string] $TsaIterationPath, # Optional: only needed if TsaOnboard is true; the iteration path where TSA will file bugs in AzDO; TSA is the automated framework used to upload test results as bugs.
+ [string] $GuardianLoggerLevel="Standard", # Optional: the logger level for the Guardian CLI; options are Trace, Verbose, Standard, Warning, and Error
+ [string[]] $CrScanAdditionalRunConfigParams, # Optional: Additional Params to custom build a CredScan run config in the format @("xyz:abc","sdf:1")
+ [string[]] $PoliCheckAdditionalRunConfigParams # Optional: Additional Params to custom build a Policheck run config in the format @("xyz:abc","sdf:1")
+)
+
+$ErrorActionPreference = "Stop"
+Set-StrictMode -Version 2.0
+$LASTEXITCODE = 0
+
+#Replace repo names to the format of org/repo
+if (!($Repository.contains('/'))) {
+  # Lazy match: only the first '-' splits, e.g. "dotnet-arcade" -> "dotnet/arcade".
+  $RepoName = $Repository -replace '(.*?)-(.*)', '$1/$2';
+}
+else{
+  $RepoName = $Repository;
+}
+
+# When a package name is given, resolve guardian.cmd inside the restored
+# NuGet package; otherwise use the explicitly supplied CLI location.
+if ($GuardianPackageName) {
+  $guardianCliLocation = Join-Path $NugetPackageDirectory (Join-Path $GuardianPackageName (Join-Path "tools" "guardian.cmd"))
+} else {
+  $guardianCliLocation = $GuardianCliLocation
+}
+
+$workingDirectory = (Split-Path $SourceDirectory -Parent)
+$ValidPath = Test-Path $guardianCliLocation
+
+if ($ValidPath -eq $False)
+{
+  Write-Host "Invalid Guardian CLI Location."
+  exit 1
+}
+
+# Download (or initialize) the .gdn configuration folder next to the sources.
+& $(Join-Path $PSScriptRoot "init-sdl.ps1") -GuardianCliLocation $guardianCliLocation -Repository $RepoName -BranchName $BranchName -WorkingDirectory $workingDirectory -AzureDevOpsAccessToken $AzureDevOpsAccessToken -GuardianLoggerLevel $GuardianLoggerLevel
+$gdnFolder = Join-Path $workingDirectory ".gdn"
+
+# Onboard the repository to TSA (one-time operation) when requested.
+if ($TsaOnboard) {
+  if ($TsaCodebaseName -and $TsaNotificationEmail -and $TsaCodebaseAdmin -and $TsaBugAreaPath) {
+    # Echo the exact command line before running it, for diagnosability.
+    Write-Host "$guardianCliLocation tsa-onboard --codebase-name `"$TsaCodebaseName`" --notification-alias `"$TsaNotificationEmail`" --codebase-admin `"$TsaCodebaseAdmin`" --instance-url `"$TsaInstanceUrl`" --project-name `"$TsaProjectName`" --area-path `"$TsaBugAreaPath`" --iteration-path `"$TsaIterationPath`" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel"
+    & $guardianCliLocation tsa-onboard --codebase-name "$TsaCodebaseName" --notification-alias "$TsaNotificationEmail" --codebase-admin "$TsaCodebaseAdmin" --instance-url "$TsaInstanceUrl" --project-name "$TsaProjectName" --area-path "$TsaBugAreaPath" --iteration-path "$TsaIterationPath" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel
+    if ($LASTEXITCODE -ne 0) {
+      Write-Host "Guardian tsa-onboard failed with exit code $LASTEXITCODE."
+      exit $LASTEXITCODE
+    }
+  } else {
+    # BUGFIX: `$$` is the PowerShell "last token" automatic variable, so the
+    # original message never printed the parameter names; escape the dollar.
+    Write-Host "Could not onboard to TSA -- not all required values (`$TsaCodebaseName, `$TsaNotificationEmail, `$TsaCodebaseAdmin, `$TsaBugAreaPath) were specified."
+    exit 1
+  }
+}
+
+# Run the requested SDL tools against built artifacts and/or sources.
+if ($ArtifactToolsList -and $ArtifactToolsList.Count -gt 0) {
+  & $(Join-Path $PSScriptRoot "run-sdl.ps1") -GuardianCliLocation $guardianCliLocation -WorkingDirectory $workingDirectory -TargetDirectory $ArtifactsDirectory -GdnFolder $gdnFolder -ToolsList $ArtifactToolsList -AzureDevOpsAccessToken $AzureDevOpsAccessToken -UpdateBaseline $UpdateBaseline -GuardianLoggerLevel $GuardianLoggerLevel -CrScanAdditionalRunConfigParams $CrScanAdditionalRunConfigParams -PoliCheckAdditionalRunConfigParams $PoliCheckAdditionalRunConfigParams
+}
+if ($SourceToolsList -and $SourceToolsList.Count -gt 0) {
+  & $(Join-Path $PSScriptRoot "run-sdl.ps1") -GuardianCliLocation $guardianCliLocation -WorkingDirectory $workingDirectory -TargetDirectory $SourceDirectory -GdnFolder $gdnFolder -ToolsList $SourceToolsList -AzureDevOpsAccessToken $AzureDevOpsAccessToken -UpdateBaseline $UpdateBaseline -GuardianLoggerLevel $GuardianLoggerLevel -CrScanAdditionalRunConfigParams $CrScanAdditionalRunConfigParams -PoliCheckAdditionalRunConfigParams $PoliCheckAdditionalRunConfigParams
+}
+
+# Push the refreshed baseline back to the config repository when requested.
+if ($UpdateBaseline) {
+  & (Join-Path $PSScriptRoot "push-gdn.ps1") -Repository $RepoName -BranchName $BranchName -GdnFolder $gdnFolder -AzureDevOpsAccessToken $AzureDevOpsAccessToken -PushReason "Update baseline"
+}
+
+# Publish tool results to TSA (files bugs in AzDO) when requested.
+if ($TsaPublish) {
+  if ($TsaBranchName -and $BuildNumber) {
+    if (-not $TsaRepositoryName) {
+      $TsaRepositoryName = "$($Repository)-$($BranchName)"
+    }
+    # BUGFIX: the echoed command now includes `--onboard $True` so it matches
+    # the command actually executed below.
+    Write-Host "$guardianCliLocation tsa-publish --all-tools --repository-name `"$TsaRepositoryName`" --branch-name `"$TsaBranchName`" --build-number `"$BuildNumber`" --onboard $True --codebase-name `"$TsaCodebaseName`" --notification-alias `"$TsaNotificationEmail`" --codebase-admin `"$TsaCodebaseAdmin`" --instance-url `"$TsaInstanceUrl`" --project-name `"$TsaProjectName`" --area-path `"$TsaBugAreaPath`" --iteration-path `"$TsaIterationPath`" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel"
+    & $guardianCliLocation tsa-publish --all-tools --repository-name "$TsaRepositoryName" --branch-name "$TsaBranchName" --build-number "$BuildNumber" --onboard $True --codebase-name "$TsaCodebaseName" --notification-alias "$TsaNotificationEmail" --codebase-admin "$TsaCodebaseAdmin" --instance-url "$TsaInstanceUrl" --project-name "$TsaProjectName" --area-path "$TsaBugAreaPath" --iteration-path "$TsaIterationPath" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel
+    if ($LASTEXITCODE -ne 0) {
+      Write-Host "Guardian tsa-publish failed with exit code $LASTEXITCODE."
+      exit $LASTEXITCODE
+    }
+  } else {
+    # BUGFIX: `$$` is the PowerShell "last token" automatic variable, so the
+    # original message never printed the parameter names; escape the dollar.
+    Write-Host "Could not publish to TSA -- not all required values (`$TsaBranchName, `$BuildNumber) were specified."
+    exit 1
+  }
+}
diff --git a/eng/common/sdl/extract-artifact-packages.ps1 b/eng/common/sdl/extract-artifact-packages.ps1
new file mode 100644
index 000000000..1fdbb1432
--- /dev/null
+++ b/eng/common/sdl/extract-artifact-packages.ps1
@@ -0,0 +1,70 @@
+param(
+ [Parameter(Mandatory=$true)][string] $InputPath, # Full path to directory where artifact packages are stored
+ [Parameter(Mandatory=$true)][string] $ExtractPath # Full path to directory where the packages will be extracted
+)
+
+$ErrorActionPreference = "Stop"
+Set-StrictMode -Version 2.0
+# Script block run inside a background job: extracts the .dll/.exe/.pdb
+# entries of one NuGet package into a per-package subdirectory of
+# $ExtractPath.
+# NOTE(review): Write-PipelineTaskError / ExitWithExitCode come from the
+# post-build utilities and are presumably not defined inside the job's
+# runspace — confirm they resolve when this block runs under Start-Job.
+$ExtractPackage = {
+  param( 
+    [string] $PackagePath                                 # Full path to a NuGet package
+  )
+
+  if (!(Test-Path $PackagePath)) {
+    Write-PipelineTaskError "Input file does not exist: $PackagePath"
+    ExitWithExitCode 1
+  }
+
+  # Only extract the file types the SDL tools actually scan.
+  $RelevantExtensions = @(".dll", ".exe", ".pdb")
+  Write-Host -NoNewLine "Extracting" ([System.IO.Path]::GetFileName($PackagePath)) "... "
+
+  $PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath)
+  # $using: pulls the outer script's $ExtractPath into the job's runspace.
+  $ExtractPath = Join-Path -Path $using:ExtractPath -ChildPath $PackageId
+
+  Add-Type -AssemblyName System.IO.Compression.FileSystem
+
+  [System.IO.Directory]::CreateDirectory($ExtractPath);
+
+  $zip = $null
+  try {
+    $zip = [System.IO.Compression.ZipFile]::OpenRead($PackagePath)
+
+    $zip.Entries |
+      Where-Object {$RelevantExtensions -contains [System.IO.Path]::GetExtension($_.Name)} |
+      ForEach-Object {
+        $TargetFile = Join-Path -Path $ExtractPath -ChildPath $_.Name
+
+        # Overwrite ($true) in case two entries share a file name.
+        [System.IO.Compression.ZipFileExtensions]::ExtractToFile($_, $TargetFile, $true)
+      }
+  }
+  catch {
+    # BUGFIX: the original swallowed extraction errors silently; surface
+    # them (best-effort semantics are kept — the job still succeeds).
+    Write-Host "Failed to extract $PackagePath : $_"
+  }
+  finally {
+    # BUGFIX: guard against OpenRead having thrown before $zip was assigned,
+    # which made the original finally block itself throw.
+    if ($null -ne $zip) {
+      $zip.Dispose()
+    }
+  }
+}
+# Extracts every .nupkg under $InputPath in parallel background jobs and
+# waits for all of them to finish, relaying their output.
+function ExtractArtifacts {
+  # Nothing to do when the input directory is absent.
+  if (!(Test-Path $InputPath)) {
+    Write-Host "Input Path does not exist: $InputPath"
+    ExitWithExitCode 0
+  }
+
+  # One background job per package.
+  $Jobs = @()
+  Get-ChildItem "$InputPath\*.nupkg" |
+    ForEach-Object {
+      $Jobs += Start-Job -ScriptBlock $ExtractPackage -ArgumentList $_.FullName
+    }
+
+  # Block until every job completes, streaming each job's output.
+  foreach ($Job in $Jobs) {
+    Wait-Job -Id $Job.Id | Receive-Job
+  }
+}
+
+# Entry point: run the extraction and report how long it took.
+try {
+  Measure-Command { ExtractArtifacts }
+}
+catch {
+  # Dump the error record, exception and stack trace before failing.
+  Write-Host $_
+  Write-Host $_.Exception
+  Write-Host $_.ScriptStackTrace
+  ExitWithExitCode 1
+}
diff --git a/eng/common/sdl/init-sdl.ps1 b/eng/common/sdl/init-sdl.ps1
new file mode 100644
index 000000000..c737eb0e7
--- /dev/null
+++ b/eng/common/sdl/init-sdl.ps1
@@ -0,0 +1,51 @@
+Param(
+ [string] $GuardianCliLocation,
+ [string] $Repository,
+ [string] $BranchName="master",
+ [string] $WorkingDirectory,
+ [string] $AzureDevOpsAccessToken,
+ [string] $GuardianLoggerLevel="Standard"
+)
+
+$ErrorActionPreference = "Stop"
+Set-StrictMode -Version 2.0
+$LASTEXITCODE = 0
+
+# Don't display the console progress UI - it's a huge perf hit
+$ProgressPreference = 'SilentlyContinue'
+
+# Construct basic auth from AzDO access token; construct URI to the repository's gdn folder stored in that repository; construct location of zip file
+$encodedPat = [Convert]::ToBase64String([System.Text.Encoding]::ASCII.GetBytes(":$AzureDevOpsAccessToken"))
+$escapedRepository = [Uri]::EscapeDataString("/$Repository/$BranchName/.gdn")
+$uri = "https://dev.azure.com/dnceng/internal/_apis/git/repositories/sdl-tool-cfg/Items?path=$escapedRepository&versionDescriptor[versionOptions]=0&`$format=zip&api-version=5.0-preview.1"
+$zipFile = "$WorkingDirectory/gdn.zip"
+
+Add-Type -AssemblyName System.IO.Compression.FileSystem
+$gdnFolder = (Join-Path $WorkingDirectory ".gdn")
+# Try to download the repo's existing .gdn folder from the sdl-tool-cfg
+# repository; on a web failure (e.g. it doesn't exist yet) fall back to
+# `guardian init` and push the freshly created folder upstream.
+Try
+{
+  # We try to download the zip; if the request fails (e.g. the file doesn't exist), we catch it and init guardian instead
+  # BUGFIX: fixed the "repostiory" typo in the log message below.
+  Write-Host "Downloading gdn folder from internal config repository..."
+  Invoke-WebRequest -Headers @{ "Accept"="application/zip"; "Authorization"="Basic $encodedPat" } -Uri $uri -OutFile $zipFile
+  if (Test-Path $gdnFolder) {
+    # Remove the gdn folder if it exists (it shouldn't unless there's too much caching; this is just in case)
+    Remove-Item -Force -Recurse $gdnFolder
+  }
+  [System.IO.Compression.ZipFile]::ExtractToDirectory($zipFile, $WorkingDirectory)
+  Write-Host $gdnFolder
+} Catch [System.Net.WebException] {
+  # if the folder does not exist, we'll do a guardian init and push it to the remote repository
+  Write-Host "Initializing Guardian..."
+  Write-Host "$GuardianCliLocation init --working-directory $WorkingDirectory --logger-level $GuardianLoggerLevel"
+  & $GuardianCliLocation init --working-directory $WorkingDirectory --logger-level $GuardianLoggerLevel
+  if ($LASTEXITCODE -ne 0) {
+    # Write-Error is terminating here because $ErrorActionPreference = "Stop".
+    Write-Error "Guardian init failed with exit code $LASTEXITCODE."
+  }
+  # We create the mainbaseline so it can be edited later
+  Write-Host "$GuardianCliLocation baseline --working-directory $WorkingDirectory --name mainbaseline"
+  & $GuardianCliLocation baseline --working-directory $WorkingDirectory --name mainbaseline
+  if ($LASTEXITCODE -ne 0) {
+    Write-Error "Guardian baseline failed with exit code $LASTEXITCODE."
+  }
+  & $(Join-Path $PSScriptRoot "push-gdn.ps1") -Repository $Repository -BranchName $BranchName -GdnFolder $gdnFolder -AzureDevOpsAccessToken $AzureDevOpsAccessToken -PushReason "Initialize gdn folder"
+}
\ No newline at end of file
diff --git a/eng/common/sdl/packages.config b/eng/common/sdl/packages.config
new file mode 100644
index 000000000..256ffbfb9
--- /dev/null
+++ b/eng/common/sdl/packages.config
@@ -0,0 +1,4 @@
+
+
+
+
diff --git a/eng/common/sdl/push-gdn.ps1 b/eng/common/sdl/push-gdn.ps1
new file mode 100644
index 000000000..79c707d6d
--- /dev/null
+++ b/eng/common/sdl/push-gdn.ps1
@@ -0,0 +1,51 @@
+Param(
+ [string] $Repository,
+ [string] $BranchName="master",
+ [string] $GdnFolder,
+ [string] $AzureDevOpsAccessToken,
+ [string] $PushReason
+)
+
+$ErrorActionPreference = "Stop"
+Set-StrictMode -Version 2.0
+$LASTEXITCODE = 0
+
+# We create the temp directory where we'll store the sdl-config repository
+$sdlDir = Join-Path $env:TEMP "sdl"
+if (Test-Path $sdlDir) {
+  Remove-Item -Force -Recurse $sdlDir
+}
+
+# Echo a redacted version of the clone command (the token itself is never printed).
+Write-Host "git clone https://dnceng:`$AzureDevOpsAccessToken@dev.azure.com/dnceng/internal/_git/sdl-tool-cfg $sdlDir"
+git clone https://dnceng:$AzureDevOpsAccessToken@dev.azure.com/dnceng/internal/_git/sdl-tool-cfg $sdlDir
+if ($LASTEXITCODE -ne 0) {
+  Write-Error "Git clone failed with exit code $LASTEXITCODE."
+}
+# We copy the .gdn folder from our local run into the git repository so it can be committed
+$sdlRepositoryFolder = Join-Path (Join-Path (Join-Path $sdlDir $Repository) $BranchName) ".gdn"
+# BUGFIX: with $ErrorActionPreference = "Stop", Get-Command *throws* when the
+# command is missing instead of returning $null, so the rsync fallback was
+# unreachable; -ErrorAction SilentlyContinue restores the intended probe.
+if (Get-Command Robocopy -ErrorAction SilentlyContinue) {
+  Robocopy /S $GdnFolder $sdlRepositoryFolder
+} else {
+  rsync -r $GdnFolder $sdlRepositoryFolder
+}
+# cd to the sdl-config directory so we can run git there
+Push-Location $sdlDir
+# git add . --> git commit --> git push
+Write-Host "git add ."
+git add .
+if ($LASTEXITCODE -ne 0) {
+  Write-Error "Git add failed with exit code $LASTEXITCODE."
+}
+Write-Host "git -c user.email=`"dn-bot@microsoft.com`" -c user.name=`"Dotnet Bot`" commit -m `"$PushReason for $Repository/$BranchName`""
+git -c user.email="dn-bot@microsoft.com" -c user.name="Dotnet Bot" commit -m "$PushReason for $Repository/$BranchName"
+if ($LASTEXITCODE -ne 0) {
+  Write-Error "Git commit failed with exit code $LASTEXITCODE."
+}
+Write-Host "git push"
+git push
+if ($LASTEXITCODE -ne 0) {
+  Write-Error "Git push failed with exit code $LASTEXITCODE."
+}
+
+# Return to the original directory
+Pop-Location
\ No newline at end of file
diff --git a/eng/common/sdl/run-sdl.ps1 b/eng/common/sdl/run-sdl.ps1
new file mode 100644
index 000000000..9bc25314a
--- /dev/null
+++ b/eng/common/sdl/run-sdl.ps1
@@ -0,0 +1,59 @@
+Param(
+ [string] $GuardianCliLocation,
+ [string] $WorkingDirectory,
+ [string] $TargetDirectory,
+ [string] $GdnFolder,
+ [string[]] $ToolsList,
+ [string] $UpdateBaseline,
+ [string] $GuardianLoggerLevel="Standard",
+ [string[]] $CrScanAdditionalRunConfigParams,
+ [string[]] $PoliCheckAdditionalRunConfigParams
+)
+
+$ErrorActionPreference = "Stop"
+Set-StrictMode -Version 2.0
+$LASTEXITCODE = 0
+
+# We store config files in the r directory of .gdn
+Write-Host $ToolsList
+$gdnConfigPath = Join-Path $GdnFolder "r"
+$ValidPath = Test-Path $GuardianCliLocation
+
+if ($ValidPath -eq $False)
+{
+  Write-Host "Invalid Guardian CLI Location."
+  exit 1
+}
+
+# Collect one --config argument per tool-specific run configuration.
+$configParam = @("--config")
+
+foreach ($tool in $ToolsList) {
+  $gdnConfigFile = Join-Path $gdnConfigPath "$tool-configure.gdnconfig"
+  Write-Host $tool
+  # We have to manually configure tools that run on source to look at the source directory only
+  if ($tool -eq "credscan") {
+    Write-Host "$GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args `" TargetDirectory < $TargetDirectory `" `" OutputType < pre `" $(If ($CrScanAdditionalRunConfigParams) {$CrScanAdditionalRunConfigParams})"
+    & $GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args " TargetDirectory < $TargetDirectory " "OutputType < pre" $(If ($CrScanAdditionalRunConfigParams) {$CrScanAdditionalRunConfigParams})
+    if ($LASTEXITCODE -ne 0) {
+      Write-Host "Guardian configure for $tool failed with exit code $LASTEXITCODE."
+      exit $LASTEXITCODE
+    }
+  }
+  if ($tool -eq "policheck") {
+    Write-Host "$GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args `" Target < $TargetDirectory `" $(If ($PoliCheckAdditionalRunConfigParams) {$PoliCheckAdditionalRunConfigParams})"
+    & $GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args " Target < $TargetDirectory " $(If ($PoliCheckAdditionalRunConfigParams) {$PoliCheckAdditionalRunConfigParams})
+    if ($LASTEXITCODE -ne 0) {
+      Write-Host "Guardian configure for $tool failed with exit code $LASTEXITCODE."
+      exit $LASTEXITCODE
+    }
+  }
+
+  $configParam+=$gdnConfigFile
+}
+
+Write-Host "$GuardianCliLocation run --working-directory $WorkingDirectory --baseline mainbaseline --update-baseline $UpdateBaseline --logger-level $GuardianLoggerLevel $configParam"
+# BUGFIX: the original invocation passed `--tool $tool`, reusing the foreach
+# loop variable after the loop had ended and contradicting the command echoed
+# above; the run applies to every tool via the collected $configParam.
+& $GuardianCliLocation run --working-directory $WorkingDirectory --baseline mainbaseline --update-baseline $UpdateBaseline --logger-level $GuardianLoggerLevel $configParam
+if ($LASTEXITCODE -ne 0) {
+  Write-Host "Guardian run for $ToolsList using $configParam failed with exit code $LASTEXITCODE."
+  exit $LASTEXITCODE
+}
diff --git a/eng/common/templates/job/execute-sdl.yml b/eng/common/templates/job/execute-sdl.yml
new file mode 100644
index 000000000..a7f996419
--- /dev/null
+++ b/eng/common/templates/job/execute-sdl.yml
@@ -0,0 +1,59 @@
+parameters:
+  overrideParameters: ''                                       # Optional: to override values for parameters.
+  additionalParameters: ''                                     # Optional: parameters that need user specific values eg: '-SourceToolsList @("abc","def") -ArtifactToolsList @("ghi","jkl")'
+  # There is some sort of bug (has been reported) in Azure DevOps where if this parameter is named
+  # 'continueOnError', the parameter value is not correctly picked up.
+  # This can also be remedied by the caller (post-build.yml) if it does not use a nested parameter
+  sdlContinueOnError: false                                    # optional: determines whether to continue the build if the step errors;
+  dependsOn: ''                                                # Optional: dependencies of the job
+
+jobs:
+- job: Run_SDL
+  dependsOn: ${{ parameters.dependsOn }}
+  displayName: Run SDL tool
+  variables:
+    - group: DotNet-VSTS-Bot
+  pool:
+    name: Hosted VS2017
+  steps:
+  - checkout: self
+    clean: true
+  # Pull down the current build's artifacts so the SDL tools can scan them.
+  - task: DownloadBuildArtifacts@0
+    displayName: Download Build Artifacts
+    inputs:
+      buildType: current
+      downloadType: specific files
+      # NOTE(review): DownloadBuildArtifacts@0 documents the input name
+      # `itemPattern`; `matchingPattern` looks like it is silently ignored
+      # (the default "**" is used anyway) — confirm and rename if so.
+      matchingPattern: "**"
+      downloadPath: $(Build.SourcesDirectory)\artifacts
+  # Unzip the binaries out of the blob/package .nupkg files in place.
+  - powershell: eng/common/sdl/extract-artifact-packages.ps1
+      -InputPath $(Build.SourcesDirectory)\artifacts\BlobArtifacts
+      -ExtractPath $(Build.SourcesDirectory)\artifacts\BlobArtifacts
+    displayName: Extract Blob Artifacts
+    continueOnError: ${{ parameters.sdlContinueOnError }}
+  - powershell: eng/common/sdl/extract-artifact-packages.ps1
+      -InputPath $(Build.SourcesDirectory)\artifacts\PackageArtifacts
+      -ExtractPath $(Build.SourcesDirectory)\artifacts\PackageArtifacts
+    displayName: Extract Package Artifacts
+    continueOnError: ${{ parameters.sdlContinueOnError }}
+  # Restore the Guardian CLI NuGet package used by execute-all-sdl-tools.ps1.
+  - task: NuGetToolInstaller@1
+    displayName: 'Install NuGet.exe'
+  - task: NuGetCommand@2
+    displayName: 'Install Guardian'
+    inputs:
+      restoreSolution: $(Build.SourcesDirectory)\eng\common\sdl\packages.config
+      feedsToUse: config
+      nugetConfigPath: $(Build.SourcesDirectory)\eng\common\sdl\NuGet.config
+      externalFeedCredentials: GuardianConnect
+      restoreDirectory: $(Build.SourcesDirectory)\.packages
+  # Callers either fully override the script arguments or append to the defaults.
+  - ${{ if ne(parameters.overrideParameters, '') }}:
+    - powershell: eng/common/sdl/execute-all-sdl-tools.ps1 ${{ parameters.overrideParameters }}
+      displayName: Execute SDL
+      continueOnError: ${{ parameters.sdlContinueOnError }}
+  - ${{ if eq(parameters.overrideParameters, '') }}:
+    - powershell: eng/common/sdl/execute-all-sdl-tools.ps1
+        -GuardianPackageName Microsoft.Guardian.Cli.0.7.2
+        -NugetPackageDirectory $(Build.SourcesDirectory)\.packages
+        -AzureDevOpsAccessToken $(dn-bot-dotnet-build-rw-code-rw)
+        ${{ parameters.additionalParameters }}
+      displayName: Execute SDL
+      continueOnError: ${{ parameters.sdlContinueOnError }}
diff --git a/eng/common/templates/job/generate-graph-files.yml b/eng/common/templates/job/generate-graph-files.yml
new file mode 100644
index 000000000..e54ce956f
--- /dev/null
+++ b/eng/common/templates/job/generate-graph-files.yml
@@ -0,0 +1,48 @@
+parameters:
+ # Optional: dependencies of the job
+ dependsOn: ''
+
+ # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
+ pool: {}
+
+ # Optional: Include toolset dependencies in the generated graph files
+ includeToolset: false
+
+jobs:
+- job: Generate_Graph_Files
+
+ dependsOn: ${{ parameters.dependsOn }}
+
+ displayName: Generate Graph Files
+
+ pool: ${{ parameters.pool }}
+
+ variables:
+ # Publish-Build-Assets provides: MaestroAccessToken, BotAccount-dotnet-maestro-bot-PAT
+ # DotNet-AllOrgs-Darc-Pats provides: dn-bot-devdiv-dnceng-rw-code-pat
+ - group: Publish-Build-Assets
+ - group: DotNet-AllOrgs-Darc-Pats
+ - name: _GraphArguments
+ value: -gitHubPat $(BotAccount-dotnet-maestro-bot-PAT)
+ -azdoPat $(dn-bot-devdiv-dnceng-rw-code-pat)
+ -barToken $(MaestroAccessToken)
+ -outputFolder '$(Build.StagingDirectory)/GraphFiles/'
+ - ${{ if ne(parameters.includeToolset, 'false') }}:
+ - name: _GraphArguments
+ value: ${{ variables._GraphArguments }} -includeToolset
+
+ steps:
+ - task: PowerShell@2
+ displayName: Generate Graph Files
+ inputs:
+ filePath: eng\common\generate-graph-files.ps1
+ arguments: $(_GraphArguments)
+ continueOnError: true
+ - task: PublishBuildArtifacts@1
+ displayName: Publish Graph to Artifacts
+ inputs:
+ PathtoPublish: '$(Build.StagingDirectory)/GraphFiles'
+ PublishLocation: Container
+ ArtifactName: GraphFiles
+ continueOnError: true
+ condition: always()
diff --git a/eng/common/templates/job/job.yml b/eng/common/templates/job/job.yml
new file mode 100644
index 000000000..13dd40e26
--- /dev/null
+++ b/eng/common/templates/job/job.yml
@@ -0,0 +1,216 @@
+# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
+# and some (Microbuild) should only be applied to non-PR cases for internal builds.
+
+parameters:
+# Job schema parameters - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
+ cancelTimeoutInMinutes: ''
+ condition: ''
+ container: ''
+ continueOnError: false
+ dependsOn: ''
+ displayName: ''
+ pool: ''
+ steps: []
+ strategy: ''
+ timeoutInMinutes: ''
+ variables: []
+ workspace: ''
+
+# Job base template specific parameters
+ # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md
+ artifacts: ''
+ enableMicrobuild: false
+ enablePublishBuildArtifacts: false
+ enablePublishBuildAssets: false
+ enablePublishTestResults: false
+ enablePublishUsingPipelines: false
+ name: ''
+ preSteps: []
+ runAsPublic: false
+
+jobs:
+- job: ${{ parameters.name }}
+
+ ${{ if ne(parameters.cancelTimeoutInMinutes, '') }}:
+ cancelTimeoutInMinutes: ${{ parameters.cancelTimeoutInMinutes }}
+
+ ${{ if ne(parameters.condition, '') }}:
+ condition: ${{ parameters.condition }}
+
+ ${{ if ne(parameters.container, '') }}:
+ container: ${{ parameters.container }}
+
+ ${{ if ne(parameters.continueOnError, '') }}:
+ continueOnError: ${{ parameters.continueOnError }}
+
+ ${{ if ne(parameters.dependsOn, '') }}:
+ dependsOn: ${{ parameters.dependsOn }}
+
+ ${{ if ne(parameters.displayName, '') }}:
+ displayName: ${{ parameters.displayName }}
+
+ ${{ if ne(parameters.pool, '') }}:
+ pool: ${{ parameters.pool }}
+
+ ${{ if ne(parameters.strategy, '') }}:
+ strategy: ${{ parameters.strategy }}
+
+ ${{ if ne(parameters.timeoutInMinutes, '') }}:
+ timeoutInMinutes: ${{ parameters.timeoutInMinutes }}
+
+ variables:
+ - ${{ if ne(parameters.enableTelemetry, 'false') }}:
+ - name: DOTNET_CLI_TELEMETRY_PROFILE
+ value: '$(Build.Repository.Uri)'
+ - ${{ each variable in parameters.variables }}:
+ # handle name-value variable syntax
+ # example:
+ # - name: [key]
+ # value: [value]
+ - ${{ if ne(variable.name, '') }}:
+ - name: ${{ variable.name }}
+ value: ${{ variable.value }}
+
+ # handle variable groups
+ - ${{ if ne(variable.group, '') }}:
+ - group: ${{ variable.group }}
+
+ # handle key-value variable syntax.
+ # example:
+ # - [key]: [value]
+ - ${{ if and(eq(variable.name, ''), eq(variable.group, '')) }}:
+ - ${{ each pair in variable }}:
+ - name: ${{ pair.key }}
+ value: ${{ pair.value }}
+
+ # DotNet-HelixApi-Access provides 'HelixApiAccessToken' for internal builds
+ - ${{ if and(eq(parameters.enableTelemetry, 'true'), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - group: DotNet-HelixApi-Access
+
+ ${{ if ne(parameters.workspace, '') }}:
+ workspace: ${{ parameters.workspace }}
+
+ steps:
+ - ${{ if ne(parameters.preSteps, '') }}:
+ - ${{ each preStep in parameters.preSteps }}:
+ - ${{ preStep }}
+
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
+ - task: MicroBuildSigningPlugin@2
+ displayName: Install MicroBuild plugin
+ inputs:
+ signType: $(_SignType)
+ zipSources: false
+ feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json
+ env:
+ TeamName: $(_TeamName)
+ continueOnError: ${{ parameters.continueOnError }}
+ condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
+
+ - task: NuGetAuthenticate@0
+
+ - ${{ if or(eq(parameters.artifacts.download, 'true'), ne(parameters.artifacts.download, '')) }}:
+ - task: DownloadPipelineArtifact@2
+ inputs:
+ buildType: current
+ artifactName: ${{ coalesce(parameters.artifacts.download.name, 'Artifacts_$(Agent.OS)_$(_BuildConfig)') }}
+ targetPath: ${{ coalesce(parameters.artifacts.download.path, 'artifacts') }}
+ itemPattern: ${{ coalesce(parameters.artifacts.download.pattern, '**') }}
+
+ - ${{ each step in parameters.steps }}:
+ - ${{ step }}
+
+ - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - task: MicroBuildCleanup@1
+ displayName: Execute Microbuild cleanup tasks
+ condition: and(always(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
+ continueOnError: ${{ parameters.continueOnError }}
+ env:
+ TeamName: $(_TeamName)
+
+ - ${{ if ne(parameters.artifacts.publish, '') }}:
+ - ${{ if or(eq(parameters.artifacts.publish.artifacts, 'true'), ne(parameters.artifacts.publish.artifacts, '')) }}:
+ - task: CopyFiles@2
+ displayName: Gather binaries for publish to artifacts
+ inputs:
+ SourceFolder: 'artifacts/bin'
+ Contents: '**'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/bin'
+ - task: CopyFiles@2
+ displayName: Gather packages for publish to artifacts
+ inputs:
+ SourceFolder: 'artifacts/packages'
+ Contents: '**'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/packages'
+ - task: PublishBuildArtifacts@1
+ displayName: Publish pipeline artifacts
+ inputs:
+ PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts'
+ PublishLocation: Container
+ ArtifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }}
+ continueOnError: true
+ condition: always()
+ - ${{ if or(eq(parameters.artifacts.publish.logs, 'true'), ne(parameters.artifacts.publish.logs, '')) }}:
+ - publish: artifacts/log
+ artifact: ${{ coalesce(parameters.artifacts.publish.logs.name, 'Logs_Build_$(Agent.Os)_$(_BuildConfig)') }}
+ displayName: Publish logs
+ continueOnError: true
+ condition: always()
+ - ${{ if or(eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, '')) }}:
+ - ${{ if and(ne(parameters.enablePublishUsingPipelines, 'true'), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - task: CopyFiles@2
+ displayName: Gather Asset Manifests
+ inputs:
+ SourceFolder: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)/AssetManifest'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/AssetManifests'
+ continueOnError: ${{ parameters.continueOnError }}
+ condition: and(succeeded(), eq(variables['_DotNetPublishToBlobFeed'], 'true'))
+
+ - task: PublishBuildArtifacts@1
+ displayName: Push Asset Manifests
+ inputs:
+ PathtoPublish: '$(Build.ArtifactStagingDirectory)/AssetManifests'
+ PublishLocation: Container
+ ArtifactName: AssetManifests
+ continueOnError: ${{ parameters.continueOnError }}
+ condition: and(succeeded(), eq(variables['_DotNetPublishToBlobFeed'], 'true'))
+
+ - ${{ if ne(parameters.enablePublishBuildArtifacts, 'false') }}:
+ - task: PublishBuildArtifacts@1
+ displayName: Publish Logs
+ inputs:
+ PathtoPublish: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)'
+ PublishLocation: Container
+ ArtifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)' ) }}
+ continueOnError: true
+ condition: always()
+
+ - ${{ if eq(parameters.enablePublishTestResults, 'true') }}:
+ - task: PublishTestResults@2
+ displayName: Publish Test Results
+ inputs:
+ testResultsFormat: 'xUnit'
+ testResultsFiles: '*.xml'
+ searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)'
+ continueOnError: true
+ condition: always()
+
+ - ${{ if and(eq(parameters.enablePublishBuildAssets, true), ne(parameters.enablePublishUsingPipelines, 'true'), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - task: CopyFiles@2
+ displayName: Gather Asset Manifests
+ inputs:
+ SourceFolder: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)/AssetManifest'
+ TargetFolder: '$(Build.StagingDirectory)/AssetManifests'
+ continueOnError: ${{ parameters.continueOnError }}
+ condition: and(succeeded(), eq(variables['_DotNetPublishToBlobFeed'], 'true'))
+
+ - task: PublishBuildArtifacts@1
+ displayName: Push Asset Manifests
+ inputs:
+ PathtoPublish: '$(Build.StagingDirectory)/AssetManifests'
+ PublishLocation: Container
+ ArtifactName: AssetManifests
+ continueOnError: ${{ parameters.continueOnError }}
+ condition: and(succeeded(), eq(variables['_DotNetPublishToBlobFeed'], 'true'))
diff --git a/eng/common/templates/job/performance.yml b/eng/common/templates/job/performance.yml
new file mode 100644
index 000000000..f877fd7a8
--- /dev/null
+++ b/eng/common/templates/job/performance.yml
@@ -0,0 +1,95 @@
+parameters:
+ steps: [] # optional -- any additional steps that need to happen before pulling down the performance repo and sending the performance benchmarks to helix (ie building your repo)
+ variables: [] # optional -- list of additional variables to send to the template
+ jobName: '' # required -- job name
+ displayName: '' # optional -- display name for the job. Will use jobName if not passed
+ pool: '' # required -- name of the Build pool
+ container: '' # required -- name of the container
+ osGroup: '' # required -- operating system for the job
+ extraSetupParameters: '' # optional -- extra arguments to pass to the setup script
+ frameworks: ['netcoreapp3.0'] # optional -- list of frameworks to run against
+ continueOnError: 'false' # optional -- determines whether to continue the build if the step errors
+ dependsOn: '' # optional -- dependencies of the job
+ timeoutInMinutes: 320 # optional -- timeout for the job
+ enableTelemetry: false # optional -- enable for telemetry
+
+jobs:
+- template: ../jobs/jobs.yml
+ parameters:
+ dependsOn: ${{ parameters.dependsOn }}
+ enableTelemetry: ${{ parameters.enableTelemetry }}
+ enablePublishBuildArtifacts: true
+ continueOnError: ${{ parameters.continueOnError }}
+
+ jobs:
+ - job: '${{ parameters.jobName }}'
+
+ ${{ if ne(parameters.displayName, '') }}:
+ displayName: '${{ parameters.displayName }}'
+ ${{ if eq(parameters.displayName, '') }}:
+ displayName: '${{ parameters.jobName }}'
+
+ timeoutInMinutes: ${{ parameters.timeoutInMinutes }}
+
+ variables:
+
+ - ${{ each variable in parameters.variables }}:
+ - ${{ if ne(variable.name, '') }}:
+ - name: ${{ variable.name }}
+ value: ${{ variable.value }}
+ - ${{ if ne(variable.group, '') }}:
+ - group: ${{ variable.group }}
+
+ - IsInternal: ''
+ - HelixApiAccessToken: ''
+ - HelixPreCommand: ''
+
+ - ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - ${{ if eq(parameters.osGroup, 'Windows_NT') }}:
+ - HelixPreCommand: 'set "PERFLAB_UPLOAD_TOKEN=$(PerfCommandUploadToken)"'
+ - IsInternal: -Internal
+ - ${{ if ne(parameters.osGroup, 'Windows_NT') }}:
+ - HelixPreCommand: 'export PERFLAB_UPLOAD_TOKEN="$(PerfCommandUploadTokenLinux)"'
+ - IsInternal: --internal
+
+ - group: DotNet-HelixApi-Access
+ - group: dotnet-benchview
+
+ workspace:
+ clean: all
+ pool:
+ ${{ parameters.pool }}
+ container: ${{ parameters.container }}
+ strategy:
+ matrix:
+ ${{ each framework in parameters.frameworks }}:
+ ${{ framework }}:
+ _Framework: ${{ framework }}
+ steps:
+ - checkout: self
+ clean: true
+ # Run all of the steps to setup repo
+ - ${{ each step in parameters.steps }}:
+ - ${{ step }}
+ - powershell: $(Build.SourcesDirectory)\eng\common\performance\performance-setup.ps1 $(IsInternal) -Framework $(_Framework) ${{ parameters.extraSetupParameters }}
+ displayName: Performance Setup (Windows)
+ condition: and(succeeded(), eq(variables['Agent.Os'], 'Windows_NT'))
+ continueOnError: ${{ parameters.continueOnError }}
+ - script: $(Build.SourcesDirectory)/eng/common/performance/performance-setup.sh $(IsInternal) --framework $(_Framework) ${{ parameters.extraSetupParameters }}
+ displayName: Performance Setup (Unix)
+ condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT'))
+ continueOnError: ${{ parameters.continueOnError }}
+ - script: $(Python) $(PerformanceDirectory)/scripts/ci_setup.py $(SetupArguments)
+ displayName: Run ci setup script
+ # Run perf testing in helix
+ - template: /eng/common/templates/steps/perf-send-to-helix.yml
+ parameters:
+ HelixSource: '$(HelixSourcePrefix)/$(Build.Repository.Name)/$(Build.SourceBranch)' # sources must start with pr/, official/, prodcon/, or agent/
+ HelixType: 'test/performance/$(Kind)/$(_Framework)/$(Architecture)'
+ HelixAccessToken: $(HelixApiAccessToken)
+ HelixTargetQueues: $(Queue)
+ HelixPreCommands: $(HelixPreCommand)
+ Creator: $(Creator)
+ WorkItemTimeout: 4:00 # 4 hours
+ WorkItemDirectory: '$(WorkItemDirectory)' # WorkItemDirectory cannot be empty, so we send it some docs to keep it happy
+ CorrelationPayloadDirectory: '$(PayloadDirectory)' # it gets checked out to a folder with shorter path than WorkItemDirectory so we can avoid file name too long exceptions
\ No newline at end of file
diff --git a/eng/common/templates/job/publish-build-assets.yml b/eng/common/templates/job/publish-build-assets.yml
new file mode 100644
index 000000000..b722975f9
--- /dev/null
+++ b/eng/common/templates/job/publish-build-assets.yml
@@ -0,0 +1,91 @@
+parameters:
+ configuration: 'Debug'
+
+ # Optional: condition for the job to run
+ condition: ''
+
+ # Optional: 'true' if future jobs should run even if this job fails
+ continueOnError: false
+
+ # Optional: dependencies of the job
+ dependsOn: ''
+
+ # Optional: Include PublishBuildArtifacts task
+ enablePublishBuildArtifacts: false
+
+ # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
+ pool: {}
+
+ # Optional: should run as a public build even in the internal project
+ # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
+ runAsPublic: false
+
+ # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing
+ publishUsingPipelines: false
+
+jobs:
+- job: Asset_Registry_Publish
+
+ dependsOn: ${{ parameters.dependsOn }}
+
+ displayName: Publish to Build Asset Registry
+
+ pool: ${{ parameters.pool }}
+
+ variables:
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - name: _BuildConfig
+ value: ${{ parameters.configuration }}
+ - group: Publish-Build-Assets
+
+ steps:
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download artifact
+ inputs:
+ artifactName: AssetManifests
+ downloadPath: '$(Build.StagingDirectory)/Download'
+ condition: ${{ parameters.condition }}
+ continueOnError: ${{ parameters.continueOnError }}
+
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - task: NuGetAuthenticate@0
+
+ - task: PowerShell@2
+ displayName: Publish Build Assets
+ inputs:
+ filePath: eng\common\sdk-task.ps1
+ arguments: -task PublishBuildAssets -restore -msbuildEngine dotnet
+ /p:ManifestsPath='$(Build.StagingDirectory)/Download/AssetManifests'
+ /p:BuildAssetRegistryToken=$(MaestroAccessToken)
+ /p:MaestroApiEndpoint=https://maestro-prod.westus2.cloudapp.azure.com
+ /p:PublishUsingPipelines=${{ parameters.publishUsingPipelines }}
+ /p:Configuration=$(_BuildConfig)
+ condition: ${{ parameters.condition }}
+ continueOnError: ${{ parameters.continueOnError }}
+
+ - task: PowerShell@2
+ displayName: Create ReleaseConfigs Artifact
+ inputs:
+ targetType: inline
+ script: |
+ Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value $(BARBuildId)
+ Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value "$(DefaultChannels)"
+ Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value $(IsStableBuild)
+
+ - task: PublishBuildArtifacts@1
+ displayName: Publish ReleaseConfigs Artifact
+ inputs:
+ PathtoPublish: '$(Build.StagingDirectory)/ReleaseConfigs.txt'
+ PublishLocation: Container
+ ArtifactName: ReleaseConfigs
+
+ - ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}:
+ - task: PublishBuildArtifacts@1
+ displayName: Publish Logs to VSTS
+ inputs:
+ PathtoPublish: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)'
+ PublishLocation: Container
+ ArtifactName: $(Agent.Os)_PublishBuildAssets
+ continueOnError: true
+ condition: always()
diff --git a/eng/common/templates/jobs/jobs.yml b/eng/common/templates/jobs/jobs.yml
new file mode 100644
index 000000000..c08225a9a
--- /dev/null
+++ b/eng/common/templates/jobs/jobs.yml
@@ -0,0 +1,72 @@
+parameters:
+ # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md
+ continueOnError: false
+
+ # Optional: Include PublishBuildArtifacts task
+ enablePublishBuildArtifacts: false
+
+ # Optional: Enable publishing using release pipelines
+ enablePublishUsingPipelines: false
+
+ graphFileGeneration:
+ # Optional: Enable generating the graph files at the end of the build
+ enabled: false
+ # Optional: Include toolset dependencies in the generated graph files
+ includeToolset: false
+
+ # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
+ jobs: []
+
+ # Optional: Override automatically derived dependsOn value for "publish build assets" job
+ publishBuildAssetsDependsOn: ''
+
+ # Optional: should run as a public build even in the internal project
+ # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
+ runAsPublic: false
+
+# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
+# and some (Microbuild) should only be applied to non-PR cases for internal builds.
+
+jobs:
+- ${{ each job in parameters.jobs }}:
+ - template: ../job/job.yml
+ parameters:
+ # pass along parameters
+ ${{ each parameter in parameters }}:
+ ${{ if ne(parameter.key, 'jobs') }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
+
+ # pass along job properties
+ ${{ each property in job }}:
+ ${{ if ne(property.key, 'job') }}:
+ ${{ property.key }}: ${{ property.value }}
+
+ name: ${{ job.job }}
+
+- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - ${{ if or(eq(parameters.enablePublishBuildAssets, true), eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, '')) }}:
+ - template: ../job/publish-build-assets.yml
+ parameters:
+ continueOnError: ${{ parameters.continueOnError }}
+ dependsOn:
+ - ${{ if ne(parameters.publishBuildAssetsDependsOn, '') }}:
+ - ${{ each job in parameters.publishBuildAssetsDependsOn }}:
+ - ${{ job.job }}
+ - ${{ if eq(parameters.publishBuildAssetsDependsOn, '') }}:
+ - ${{ each job in parameters.jobs }}:
+ - ${{ job.job }}
+ pool:
+ vmImage: vs2017-win2016
+ runAsPublic: ${{ parameters.runAsPublic }}
+ publishUsingPipelines: ${{ parameters.enablePublishUsingPipelines }}
+ enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }}
+
+ - ${{ if eq(parameters.graphFileGeneration.enabled, true) }}:
+ - template: ../job/generate-graph-files.yml
+ parameters:
+ continueOnError: ${{ parameters.continueOnError }}
+ includeToolset: ${{ parameters.graphFileGeneration.includeToolset }}
+ dependsOn:
+ - Asset_Registry_Publish
+ pool:
+ vmImage: vs2017-win2016
diff --git a/eng/common/templates/phases/base.yml b/eng/common/templates/phases/base.yml
new file mode 100644
index 000000000..0123cf43b
--- /dev/null
+++ b/eng/common/templates/phases/base.yml
@@ -0,0 +1,130 @@
+parameters:
+ # Optional: Clean sources before building
+ clean: true
+
+ # Optional: Git fetch depth
+ fetchDepth: ''
+
+ # Optional: name of the phase (not specifying phase name may cause name collisions)
+ name: ''
+ # Optional: display name of the phase
+ displayName: ''
+
+ # Optional: condition for the job to run
+ condition: ''
+
+ # Optional: dependencies of the phase
+ dependsOn: ''
+
+ # Required: A defined YAML queue
+ queue: {}
+
+ # Required: build steps
+ steps: []
+
+ # Optional: variables
+ variables: {}
+
+ # Optional: should run as a public build even in the internal project
+ # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
+ runAsPublic: false
+
+ ## Telemetry variables
+
+ # Optional: enable sending telemetry
+ # if 'true', these "variables" must be specified in the variables object or as part of the queue matrix
+ # _HelixBuildConfig - differentiate between Debug, Release, other
+ # _HelixSource - Example: build/product
+ # _HelixType - Example: official/dotnet/arcade/$(Build.SourceBranch)
+ enableTelemetry: false
+
+ # Optional: Enable installing Microbuild plugin
+ # if 'true', these "variables" must be specified in the variables object or as part of the queue matrix
+ # _TeamName - the name of your team
+ # _SignType - 'test' or 'real'
+ enableMicrobuild: false
+
+# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
+# and some (Microbuild) should only be applied to non-PR cases for internal builds.
+
+phases:
+- phase: ${{ parameters.name }}
+
+ ${{ if ne(parameters.displayName, '') }}:
+ displayName: ${{ parameters.displayName }}
+
+ ${{ if ne(parameters.condition, '') }}:
+ condition: ${{ parameters.condition }}
+
+ ${{ if ne(parameters.dependsOn, '') }}:
+ dependsOn: ${{ parameters.dependsOn }}
+
+ queue: ${{ parameters.queue }}
+
+ ${{ if ne(parameters.variables, '') }}:
+ variables:
+ ${{ insert }}: ${{ parameters.variables }}
+
+ steps:
+ - checkout: self
+ clean: ${{ parameters.clean }}
+ ${{ if ne(parameters.fetchDepth, '') }}:
+ fetchDepth: ${{ parameters.fetchDepth }}
+
+ - ${{ if eq(parameters.enableTelemetry, 'true') }}:
+ - template: /eng/common/templates/steps/telemetry-start.yml
+ parameters:
+ buildConfig: $(_HelixBuildConfig)
+ helixSource: $(_HelixSource)
+ helixType: $(_HelixType)
+ runAsPublic: ${{ parameters.runAsPublic }}
+
+ - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
+ # Internal only resource, and Microbuild signing shouldn't be applied to PRs.
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - task: MicroBuildSigningPlugin@2
+ displayName: Install MicroBuild plugin
+ inputs:
+ signType: $(_SignType)
+ zipSources: false
+ feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json
+
+ env:
+ TeamName: $(_TeamName)
+ continueOnError: false
+ condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
+
+ # Run provided build steps
+ - ${{ parameters.steps }}
+
+ - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
+ # Internal only resources
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - task: MicroBuildCleanup@1
+ displayName: Execute Microbuild cleanup tasks
+ condition: and(always(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
+ env:
+ TeamName: $(_TeamName)
+
+ - ${{ if eq(parameters.enableTelemetry, 'true') }}:
+ - template: /eng/common/templates/steps/telemetry-end.yml
+ parameters:
+ helixSource: $(_HelixSource)
+ helixType: $(_HelixType)
+
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - task: CopyFiles@2
+ displayName: Gather Asset Manifests
+ inputs:
+ SourceFolder: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)/AssetManifest'
+ TargetFolder: '$(Build.StagingDirectory)/AssetManifests'
+ continueOnError: false
+ condition: and(succeeded(), eq(variables['_DotNetPublishToBlobFeed'], 'true'))
+ - task: PublishBuildArtifacts@1
+ displayName: Push Asset Manifests
+ inputs:
+ PathtoPublish: '$(Build.StagingDirectory)/AssetManifests'
+ PublishLocation: Container
+ ArtifactName: AssetManifests
+ continueOnError: false
+ condition: and(succeeded(), eq(variables['_DotNetPublishToBlobFeed'], 'true'))
diff --git a/eng/common/templates/phases/publish-build-assets.yml b/eng/common/templates/phases/publish-build-assets.yml
new file mode 100644
index 000000000..a0a807428
--- /dev/null
+++ b/eng/common/templates/phases/publish-build-assets.yml
@@ -0,0 +1,51 @@
+parameters:
+ dependsOn: ''
+ queue: {}
+ configuration: 'Debug'
+ condition: succeeded()
+ continueOnError: false
+ runAsPublic: false
+ publishUsingPipelines: false
+phases:
+ - phase: Asset_Registry_Publish
+ displayName: Publish to Build Asset Registry
+ dependsOn: ${{ parameters.dependsOn }}
+ queue: ${{ parameters.queue }}
+ variables:
+ _BuildConfig: ${{ parameters.configuration }}
+ steps:
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download artifact
+ inputs:
+ artifactName: AssetManifests
+ downloadPath: '$(Build.StagingDirectory)/Download'
+ condition: ${{ parameters.condition }}
+ continueOnError: ${{ parameters.continueOnError }}
+ - task: AzureKeyVault@1
+ inputs:
+ azureSubscription: 'DotNet-Engineering-Services_KeyVault'
+ KeyVaultName: EngKeyVault
+ SecretsFilter: 'MaestroAccessToken'
+ condition: ${{ parameters.condition }}
+ continueOnError: ${{ parameters.continueOnError }}
+ - task: PowerShell@2
+ displayName: Publish Build Assets
+ inputs:
+ filePath: eng\common\sdk-task.ps1
+ arguments: -task PublishBuildAssets -restore -msbuildEngine dotnet
+ /p:ManifestsPath='$(Build.StagingDirectory)/Download/AssetManifests'
+ /p:BuildAssetRegistryToken=$(MaestroAccessToken)
+ /p:MaestroApiEndpoint=https://maestro-prod.westus2.cloudapp.azure.com
+ /p:PublishUsingPipelines=${{ parameters.publishUsingPipelines }}
+ /p:Configuration=$(_BuildConfig)
+ condition: ${{ parameters.condition }}
+ continueOnError: ${{ parameters.continueOnError }}
+ - task: PublishBuildArtifacts@1
+ displayName: Publish Logs to VSTS
+ inputs:
+ PathtoPublish: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)'
+ PublishLocation: Container
+ ArtifactName: $(Agent.Os)_Asset_Registry_Publish
+ continueOnError: true
+ condition: always()
diff --git a/eng/common/templates/post-build/channels/netcore-dev-31.yml b/eng/common/templates/post-build/channels/netcore-dev-31.yml
new file mode 100644
index 000000000..af64724f7
--- /dev/null
+++ b/eng/common/templates/post-build/channels/netcore-dev-31.yml
@@ -0,0 +1,132 @@
+parameters:
+ artifactsPublishingAdditionalParameters: ''
+ dependsOn:
+ - Validate
+ publishInstallersAndChecksums: false
+ symbolPublishingAdditionalParameters: ''
+
+stages:
+- stage: NetCore_Dev31_Publish
+ dependsOn: ${{ parameters.dependsOn }}
+ variables:
+ - template: ../common-variables.yml
+ displayName: .NET Core 3.1 Dev Publishing
+ jobs:
+ - template: ../setup-maestro-vars.yml
+
+ - job:
+ displayName: Symbol Publishing
+ dependsOn: setupMaestroVars
+ condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.InitialChannels'], format('[{0}]', variables.PublicDevRelease_31_Channel_Id))
+ variables:
+ - group: DotNet-Symbol-Server-Pats
+ pool:
+ vmImage: 'windows-2019'
+ steps:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Blob Artifacts
+ inputs:
+ artifactName: 'BlobArtifacts'
+ continueOnError: true
+
+ - task: DownloadBuildArtifacts@0
+ displayName: Download PDB Artifacts
+ inputs:
+ artifactName: 'PDBArtifacts'
+ continueOnError: true
+
+ - task: PowerShell@2
+ displayName: Publish
+ inputs:
+ filePath: eng\common\sdk-task.ps1
+ arguments: -task PublishToSymbolServers -restore -msbuildEngine dotnet
+ /p:DotNetSymbolServerTokenMsdl=$(microsoft-symbol-server-pat)
+ /p:DotNetSymbolServerTokenSymWeb=$(symweb-symbol-server-pat)
+ /p:PDBArtifactsDirectory='$(Build.ArtifactStagingDirectory)/PDBArtifacts/'
+ /p:BlobBasePath='$(Build.ArtifactStagingDirectory)/BlobArtifacts/'
+ /p:SymbolPublishingExclusionsFile='$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt'
+ /p:Configuration=Release
+ ${{ parameters.symbolPublishingAdditionalParameters }}
+
+ - job: publish_assets
+ displayName: Publish Assets
+ dependsOn: setupMaestroVars
+ variables:
+ - group: DotNet-Blob-Feed
+ - group: AzureDevOps-Artifact-Feeds-Pats
+ - name: BARBuildId
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.BARBuildId'] ]
+ - name: IsStableBuild
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.IsStableBuild'] ]
+ condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.InitialChannels'], format('[{0}]', variables.PublicDevRelease_31_Channel_Id))
+ pool:
+ vmImage: 'windows-2019'
+ steps:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Package Artifacts
+ inputs:
+ buildType: current
+ artifactName: PackageArtifacts
+
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Blob Artifacts
+ inputs:
+ buildType: current
+ artifactName: BlobArtifacts
+
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Asset Manifests
+ inputs:
+ buildType: current
+ artifactName: AssetManifests
+
+ - task: NuGetToolInstaller@1
+ displayName: 'Install NuGet.exe'
+
+ # This is necessary whenever we want to publish/restore to an AzDO private feed
+ - task: NuGetAuthenticate@0
+ displayName: 'Authenticate to AzDO Feeds'
+
+ - task: PowerShell@2
+ displayName: Enable cross-org publishing
+ inputs:
+ filePath: eng\common\enable-cross-org-publishing.ps1
+ arguments: -token $(dn-bot-dnceng-artifact-feeds-rw)
+
+ - task: PowerShell@2
+ displayName: Publish Assets
+ inputs:
+ filePath: eng\common\sdk-task.ps1
+ arguments: -task PublishArtifactsInManifest -restore -msbuildEngine dotnet
+ /p:ArtifactsCategory=$(_DotNetArtifactsCategory)
+ /p:IsStableBuild=$(IsStableBuild)
+ /p:IsInternalBuild=$(IsInternalBuild)
+ /p:RepositoryName=$(Build.Repository.Name)
+ /p:CommitSha=$(Build.SourceVersion)
+ /p:NugetPath=$(NuGetExeToolPath)
+ /p:AzdoTargetFeedPAT='$(dn-bot-dnceng-universal-packages-rw)'
+ /p:AzureStorageTargetFeedPAT='$(dotnetfeed-storage-access-key-1)'
+ /p:BARBuildId=$(BARBuildId)
+ /p:MaestroApiEndpoint='$(MaestroApiEndPoint)'
+ /p:BuildAssetRegistryToken='$(MaestroApiAccessToken)'
+ /p:ManifestsBasePath='$(Build.ArtifactStagingDirectory)/AssetManifests/'
+ /p:BlobBasePath='$(Build.ArtifactStagingDirectory)/BlobArtifacts/'
+ /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts/'
+ /p:Configuration=Release
+ /p:PublishInstallersAndChecksums=${{ parameters.publishInstallersAndChecksums }}
+ /p:InstallersTargetStaticFeed=$(InstallersBlobFeedUrl)
+ /p:InstallersAzureAccountKey=$(dotnetcli-storage-key)
+ /p:ChecksumsTargetStaticFeed=$(ChecksumsBlobFeedUrl)
+ /p:ChecksumsAzureAccountKey=$(dotnetclichecksums-storage-key)
+ /p:PublishToAzureDevOpsNuGetFeeds=true
+ /p:AzureDevOpsStaticShippingFeed='https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1/nuget/v3/index.json'
+ /p:AzureDevOpsStaticShippingFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)'
+ /p:AzureDevOpsStaticTransportFeed='https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-transport/nuget/v3/index.json'
+ /p:AzureDevOpsStaticTransportFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)'
+ /p:AzureDevOpsStaticSymbolsFeed='https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-symbols/nuget/v3/index.json'
+ /p:AzureDevOpsStaticSymbolsFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)'
+ ${{ parameters.artifactsPublishingAdditionalParameters }}
+
+ - template: ../../steps/promote-build.yml
+ parameters:
+ ChannelId: ${{ variables.PublicDevRelease_31_Channel_Id }}
diff --git a/eng/common/templates/post-build/channels/netcore-dev-5.yml b/eng/common/templates/post-build/channels/netcore-dev-5.yml
new file mode 100644
index 000000000..6c8dff542
--- /dev/null
+++ b/eng/common/templates/post-build/channels/netcore-dev-5.yml
@@ -0,0 +1,132 @@
+parameters:
+ artifactsPublishingAdditionalParameters: ''
+ dependsOn:
+ - Validate
+ publishInstallersAndChecksums: false
+ symbolPublishingAdditionalParameters: ''
+
+stages:
+- stage: NetCore_Dev5_Publish
+ dependsOn: ${{ parameters.dependsOn }}
+ variables:
+ - template: ../common-variables.yml
+ displayName: .NET Core 5 Dev Publishing
+ jobs:
+ - template: ../setup-maestro-vars.yml
+
+ - job:
+ displayName: Symbol Publishing
+ dependsOn: setupMaestroVars
+ condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.InitialChannels'], format('[{0}]', variables.NetCore_5_Dev_Channel_Id))
+ variables:
+ - group: DotNet-Symbol-Server-Pats
+ pool:
+ vmImage: 'windows-2019'
+ steps:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Blob Artifacts
+ inputs:
+ artifactName: 'BlobArtifacts'
+ continueOnError: true
+
+ - task: DownloadBuildArtifacts@0
+ displayName: Download PDB Artifacts
+ inputs:
+ artifactName: 'PDBArtifacts'
+ continueOnError: true
+
+ - task: PowerShell@2
+ displayName: Publish
+ inputs:
+ filePath: eng\common\sdk-task.ps1
+ arguments: -task PublishToSymbolServers -restore -msbuildEngine dotnet
+ /p:DotNetSymbolServerTokenMsdl=$(microsoft-symbol-server-pat)
+ /p:DotNetSymbolServerTokenSymWeb=$(symweb-symbol-server-pat)
+ /p:PDBArtifactsDirectory='$(Build.ArtifactStagingDirectory)/PDBArtifacts/'
+ /p:BlobBasePath='$(Build.ArtifactStagingDirectory)/BlobArtifacts/'
+ /p:SymbolPublishingExclusionsFile='$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt'
+ /p:Configuration=Release
+ ${{ parameters.symbolPublishingAdditionalParameters }}
+
+ - job: publish_assets
+ displayName: Publish Assets
+ dependsOn: setupMaestroVars
+ variables:
+ - group: DotNet-Blob-Feed
+ - group: AzureDevOps-Artifact-Feeds-Pats
+ - name: BARBuildId
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.BARBuildId'] ]
+ - name: IsStableBuild
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.IsStableBuild'] ]
+ condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.InitialChannels'], format('[{0}]', variables.NetCore_5_Dev_Channel_Id))
+ pool:
+ vmImage: 'windows-2019'
+ steps:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Package Artifacts
+ inputs:
+ buildType: current
+ artifactName: PackageArtifacts
+
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Blob Artifacts
+ inputs:
+ buildType: current
+ artifactName: BlobArtifacts
+
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Asset Manifests
+ inputs:
+ buildType: current
+ artifactName: AssetManifests
+
+ - task: NuGetToolInstaller@1
+ displayName: 'Install NuGet.exe'
+
+ # This is necessary whenever we want to publish/restore to an AzDO private feed
+ - task: NuGetAuthenticate@0
+ displayName: 'Authenticate to AzDO Feeds'
+
+ - task: PowerShell@2
+ displayName: Enable cross-org publishing
+ inputs:
+ filePath: eng\common\enable-cross-org-publishing.ps1
+ arguments: -token $(dn-bot-dnceng-artifact-feeds-rw)
+
+ - task: PowerShell@2
+ displayName: Publish Assets
+ inputs:
+ filePath: eng\common\sdk-task.ps1
+ arguments: -task PublishArtifactsInManifest -restore -msbuildEngine dotnet
+ /p:ArtifactsCategory=$(_DotNetArtifactsCategory)
+ /p:IsStableBuild=$(IsStableBuild)
+ /p:IsInternalBuild=$(IsInternalBuild)
+ /p:RepositoryName=$(Build.Repository.Name)
+ /p:CommitSha=$(Build.SourceVersion)
+ /p:NugetPath=$(NuGetExeToolPath)
+ /p:AzdoTargetFeedPAT='$(dn-bot-dnceng-universal-packages-rw)'
+ /p:AzureStorageTargetFeedPAT='$(dotnetfeed-storage-access-key-1)'
+ /p:BARBuildId=$(BARBuildId)
+ /p:MaestroApiEndpoint='$(MaestroApiEndPoint)'
+ /p:BuildAssetRegistryToken='$(MaestroApiAccessToken)'
+ /p:ManifestsBasePath='$(Build.ArtifactStagingDirectory)/AssetManifests/'
+ /p:BlobBasePath='$(Build.ArtifactStagingDirectory)/BlobArtifacts/'
+ /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts/'
+ /p:Configuration=Release
+ /p:PublishInstallersAndChecksums=${{ parameters.publishInstallersAndChecksums }}
+ /p:InstallersTargetStaticFeed=$(InstallersBlobFeedUrl)
+ /p:InstallersAzureAccountKey=$(dotnetcli-storage-key)
+ /p:ChecksumsTargetStaticFeed=$(ChecksumsBlobFeedUrl)
+ /p:ChecksumsAzureAccountKey=$(dotnetclichecksums-storage-key)
+ /p:PublishToAzureDevOpsNuGetFeeds=true
+ /p:AzureDevOpsStaticShippingFeed='https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet5/nuget/v3/index.json'
+ /p:AzureDevOpsStaticShippingFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)'
+ /p:AzureDevOpsStaticTransportFeed='https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet5-transport/nuget/v3/index.json'
+ /p:AzureDevOpsStaticTransportFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)'
+ /p:AzureDevOpsStaticSymbolsFeed='https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet5-symbols/nuget/v3/index.json'
+ /p:AzureDevOpsStaticSymbolsFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)'
+ ${{ parameters.artifactsPublishingAdditionalParameters }}
+
+ - template: ../../steps/promote-build.yml
+ parameters:
+ ChannelId: ${{ variables.NetCore_5_Dev_Channel_Id }}
diff --git a/eng/common/templates/post-build/channels/netcore-internal-30.yml b/eng/common/templates/post-build/channels/netcore-internal-30.yml
new file mode 100644
index 000000000..1a2d0d01f
--- /dev/null
+++ b/eng/common/templates/post-build/channels/netcore-internal-30.yml
@@ -0,0 +1,130 @@
+parameters:
+ artifactsPublishingAdditionalParameters: ''
+ dependsOn:
+ - Validate
+ symbolPublishingAdditionalParameters: ''
+
+stages:
+- stage: NetCore_30_Internal_Servicing_Publishing
+ dependsOn: ${{ parameters.dependsOn }}
+ variables:
+ - template: ../common-variables.yml
+ displayName: .NET Core 3.0 Internal Servicing Publishing
+ jobs:
+ - template: ../setup-maestro-vars.yml
+
+ - job:
+ displayName: Symbol Publishing
+ dependsOn: setupMaestroVars
+ condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.InitialChannels'], format('[{0}]', variables.InternalServicing_30_Channel_Id))
+ variables:
+ - group: DotNet-Symbol-Server-Pats
+ pool:
+ vmImage: 'windows-2019'
+ steps:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Blob Artifacts
+ inputs:
+ artifactName: 'BlobArtifacts'
+ continueOnError: true
+
+ - task: DownloadBuildArtifacts@0
+ displayName: Download PDB Artifacts
+ inputs:
+ artifactName: 'PDBArtifacts'
+ continueOnError: true
+
+ - task: PowerShell@2
+ displayName: Publish
+ inputs:
+ filePath: eng\common\sdk-task.ps1
+ arguments: -task PublishToSymbolServers -restore -msbuildEngine dotnet
+ /p:DotNetSymbolServerTokenMsdl=$(microsoft-symbol-server-pat)
+ /p:DotNetSymbolServerTokenSymWeb=$(symweb-symbol-server-pat)
+ /p:PDBArtifactsDirectory='$(Build.ArtifactStagingDirectory)/PDBArtifacts/'
+ /p:BlobBasePath='$(Build.ArtifactStagingDirectory)/BlobArtifacts/'
+ /p:SymbolPublishingExclusionsFile='$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt'
+ /p:Configuration=Release
+ ${{ parameters.symbolPublishingAdditionalParameters }}
+
+ - job: publish_assets
+ displayName: Publish Assets
+ dependsOn: setupMaestroVars
+ variables:
+ - group: DotNet-Blob-Feed
+ - group: AzureDevOps-Artifact-Feeds-Pats
+ - name: BARBuildId
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.BARBuildId'] ]
+ - name: IsStableBuild
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.IsStableBuild'] ]
+ condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.InitialChannels'], format('[{0}]', variables.InternalServicing_30_Channel_Id))
+ pool:
+ vmImage: 'windows-2019'
+ steps:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Package Artifacts
+ inputs:
+ buildType: current
+ artifactName: PackageArtifacts
+
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Blob Artifacts
+ inputs:
+ buildType: current
+ artifactName: BlobArtifacts
+
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Asset Manifests
+ inputs:
+ buildType: current
+ artifactName: AssetManifests
+
+ - task: NuGetToolInstaller@1
+ displayName: 'Install NuGet.exe'
+
+ # This is necessary whenever we want to publish/restore to an AzDO private feed
+ - task: NuGetAuthenticate@0
+ displayName: 'Authenticate to AzDO Feeds'
+
+ - task: PowerShell@2
+ displayName: Enable cross-org publishing
+ inputs:
+ filePath: eng\common\enable-cross-org-publishing.ps1
+ arguments: -token $(dn-bot-dnceng-artifact-feeds-rw)
+
+ - task: PowerShell@2
+ displayName: Publish Assets
+ inputs:
+ filePath: eng\common\sdk-task.ps1
+ arguments: -task PublishArtifactsInManifest -restore -msbuildEngine dotnet
+ /p:IsStableBuild=$(IsStableBuild)
+ /p:IsInternalBuild=$(IsInternalBuild)
+ /p:RepositoryName=$(Build.Repository.Name)
+ /p:CommitSha=$(Build.SourceVersion)
+ /p:NugetPath=$(NuGetExeToolPath)
+ /p:AzdoTargetFeedPAT='$(dn-bot-dnceng-universal-packages-rw)'
+ /p:AzureStorageTargetFeedPAT='$(dotnetfeed-storage-access-key-1)'
+ /p:BARBuildId=$(BARBuildId)
+ /p:MaestroApiEndpoint='$(MaestroApiEndPoint)'
+ /p:BuildAssetRegistryToken='$(MaestroApiAccessToken)'
+ /p:ManifestsBasePath='$(Build.ArtifactStagingDirectory)/AssetManifests/'
+ /p:BlobBasePath='$(Build.ArtifactStagingDirectory)/BlobArtifacts/'
+ /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts/'
+ /p:Configuration=Release
+ /p:PublishInstallersAndChecksums=true
+ /p:ChecksumsTargetStaticFeed=$(InternalChecksumsBlobFeedUrl)
+ /p:ChecksumsAzureAccountKey=$(InternalChecksumsBlobFeedKey)
+ /p:InstallersTargetStaticFeed=$(InternalInstallersBlobFeedUrl)
+ /p:InstallersAzureAccountKey=$(InternalInstallersBlobFeedKey)
+ /p:PublishToAzureDevOpsNuGetFeeds=true
+ /p:AzureDevOpsStaticShippingFeed='https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3-internal/nuget/v3/index.json'
+ /p:AzureDevOpsStaticShippingFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)'
+ /p:AzureDevOpsStaticTransportFeed='https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3-internal-transport/nuget/v3/index.json'
+ /p:AzureDevOpsStaticTransportFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)'
+ /p:AzureDevOpsStaticSymbolsFeed='https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3-internal-symbols/nuget/v3/index.json'
+ /p:AzureDevOpsStaticSymbolsFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)'
+ ${{ parameters.artifactsPublishingAdditionalParameters }}
+
+ - template: ../../steps/promote-build.yml
+ parameters:
+ ChannelId: ${{ variables.InternalServicing_30_Channel_Id }}
diff --git a/eng/common/templates/post-build/channels/netcore-release-30.yml b/eng/common/templates/post-build/channels/netcore-release-30.yml
new file mode 100644
index 000000000..206dd43e3
--- /dev/null
+++ b/eng/common/templates/post-build/channels/netcore-release-30.yml
@@ -0,0 +1,132 @@
+parameters:
+ artifactsPublishingAdditionalParameters: ''
+ dependsOn:
+ - Validate
+ publishInstallersAndChecksums: false
+ symbolPublishingAdditionalParameters: ''
+
+stages:
+- stage: NetCore_Release30_Publish
+ dependsOn: ${{ parameters.dependsOn }}
+ variables:
+ - template: ../common-variables.yml
+ displayName: .NET Core 3.0 Release Publishing
+ jobs:
+ - template: ../setup-maestro-vars.yml
+
+ - job:
+ displayName: Symbol Publishing
+ dependsOn: setupMaestroVars
+ condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.InitialChannels'], format('[{0}]', variables.PublicRelease_30_Channel_Id))
+ variables:
+ - group: DotNet-Symbol-Server-Pats
+ pool:
+ vmImage: 'windows-2019'
+ steps:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Blob Artifacts
+ inputs:
+ artifactName: 'BlobArtifacts'
+ continueOnError: true
+
+ - task: DownloadBuildArtifacts@0
+ displayName: Download PDB Artifacts
+ inputs:
+ artifactName: 'PDBArtifacts'
+ continueOnError: true
+
+ - task: PowerShell@2
+ displayName: Publish
+ inputs:
+ filePath: eng\common\sdk-task.ps1
+ arguments: -task PublishToSymbolServers -restore -msbuildEngine dotnet
+ /p:DotNetSymbolServerTokenMsdl=$(microsoft-symbol-server-pat)
+ /p:DotNetSymbolServerTokenSymWeb=$(symweb-symbol-server-pat)
+ /p:PDBArtifactsDirectory='$(Build.ArtifactStagingDirectory)/PDBArtifacts/'
+ /p:BlobBasePath='$(Build.ArtifactStagingDirectory)/BlobArtifacts/'
+ /p:SymbolPublishingExclusionsFile='$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt'
+ /p:Configuration=Release
+ ${{ parameters.symbolPublishingAdditionalParameters }}
+
+ - job: publish_assets
+ displayName: Publish Assets
+ dependsOn: setupMaestroVars
+ variables:
+ - group: DotNet-Blob-Feed
+ - group: AzureDevOps-Artifact-Feeds-Pats
+ - name: BARBuildId
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.BARBuildId'] ]
+ - name: IsStableBuild
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.IsStableBuild'] ]
+ condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.InitialChannels'], format('[{0}]', variables.PublicRelease_30_Channel_Id))
+ pool:
+ vmImage: 'windows-2019'
+ steps:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Package Artifacts
+ inputs:
+ buildType: current
+ artifactName: PackageArtifacts
+
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Blob Artifacts
+ inputs:
+ buildType: current
+ artifactName: BlobArtifacts
+
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Asset Manifests
+ inputs:
+ buildType: current
+ artifactName: AssetManifests
+
+ - task: NuGetToolInstaller@1
+ displayName: 'Install NuGet.exe'
+
+ # This is necessary whenever we want to publish/restore to an AzDO private feed
+ - task: NuGetAuthenticate@0
+ displayName: 'Authenticate to AzDO Feeds'
+
+ - task: PowerShell@2
+ displayName: Enable cross-org publishing
+ inputs:
+ filePath: eng\common\enable-cross-org-publishing.ps1
+ arguments: -token $(dn-bot-dnceng-artifact-feeds-rw)
+
+ - task: PowerShell@2
+ displayName: Publish Assets
+ inputs:
+ filePath: eng\common\sdk-task.ps1
+ arguments: -task PublishArtifactsInManifest -restore -msbuildEngine dotnet
+ /p:ArtifactsCategory=$(_DotNetArtifactsCategory)
+ /p:IsStableBuild=$(IsStableBuild)
+ /p:IsInternalBuild=$(IsInternalBuild)
+ /p:RepositoryName=$(Build.Repository.Name)
+ /p:CommitSha=$(Build.SourceVersion)
+ /p:NugetPath=$(NuGetExeToolPath)
+ /p:AzdoTargetFeedPAT='$(dn-bot-dnceng-universal-packages-rw)'
+ /p:AzureStorageTargetFeedPAT='$(dotnetfeed-storage-access-key-1)'
+ /p:BARBuildId=$(BARBuildId)
+ /p:MaestroApiEndpoint='$(MaestroApiEndPoint)'
+ /p:BuildAssetRegistryToken='$(MaestroApiAccessToken)'
+ /p:ManifestsBasePath='$(Build.ArtifactStagingDirectory)/AssetManifests/'
+ /p:BlobBasePath='$(Build.ArtifactStagingDirectory)/BlobArtifacts/'
+ /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts/'
+ /p:Configuration=Release
+ /p:PublishInstallersAndChecksums=${{ parameters.publishInstallersAndChecksums }}
+ /p:InstallersTargetStaticFeed=$(InstallersBlobFeedUrl)
+ /p:InstallersAzureAccountKey=$(dotnetcli-storage-key)
+ /p:ChecksumsTargetStaticFeed=$(ChecksumsBlobFeedUrl)
+ /p:ChecksumsAzureAccountKey=$(dotnetclichecksums-storage-key)
+ /p:PublishToAzureDevOpsNuGetFeeds=true
+ /p:AzureDevOpsStaticShippingFeed='https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3/nuget/v3/index.json'
+ /p:AzureDevOpsStaticShippingFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)'
+ /p:AzureDevOpsStaticTransportFeed='https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3-transport/nuget/v3/index.json'
+ /p:AzureDevOpsStaticTransportFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)'
+ /p:AzureDevOpsStaticSymbolsFeed='https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3-symbols/nuget/v3/index.json'
+ /p:AzureDevOpsStaticSymbolsFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)'
+ ${{ parameters.artifactsPublishingAdditionalParameters }}
+
+ - template: ../../steps/promote-build.yml
+ parameters:
+ ChannelId: ${{ variables.PublicRelease_30_Channel_Id }}
diff --git a/eng/common/templates/post-build/channels/netcore-release-31.yml b/eng/common/templates/post-build/channels/netcore-release-31.yml
new file mode 100644
index 000000000..6270c8283
--- /dev/null
+++ b/eng/common/templates/post-build/channels/netcore-release-31.yml
@@ -0,0 +1,132 @@
+parameters:
+ artifactsPublishingAdditionalParameters: ''
+ dependsOn:
+ - Validate
+ publishInstallersAndChecksums: false
+ symbolPublishingAdditionalParameters: ''
+
+stages:
+- stage: NetCore_Release31_Publish
+ dependsOn: ${{ parameters.dependsOn }}
+ variables:
+ - template: ../common-variables.yml
+ displayName: .NET Core 3.1 Release Publishing
+ jobs:
+ - template: ../setup-maestro-vars.yml
+
+ - job:
+ displayName: Symbol Publishing
+ dependsOn: setupMaestroVars
+ condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.InitialChannels'], format('[{0}]', variables.PublicRelease_31_Channel_Id))
+ variables:
+ - group: DotNet-Symbol-Server-Pats
+ pool:
+ vmImage: 'windows-2019'
+ steps:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Blob Artifacts
+ inputs:
+ artifactName: 'BlobArtifacts'
+ continueOnError: true
+
+ - task: DownloadBuildArtifacts@0
+ displayName: Download PDB Artifacts
+ inputs:
+ artifactName: 'PDBArtifacts'
+ continueOnError: true
+
+ - task: PowerShell@2
+ displayName: Publish
+ inputs:
+ filePath: eng\common\sdk-task.ps1
+ arguments: -task PublishToSymbolServers -restore -msbuildEngine dotnet
+ /p:DotNetSymbolServerTokenMsdl=$(microsoft-symbol-server-pat)
+ /p:DotNetSymbolServerTokenSymWeb=$(symweb-symbol-server-pat)
+ /p:PDBArtifactsDirectory='$(Build.ArtifactStagingDirectory)/PDBArtifacts/'
+ /p:BlobBasePath='$(Build.ArtifactStagingDirectory)/BlobArtifacts/'
+ /p:SymbolPublishingExclusionsFile='$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt'
+ /p:Configuration=Release
+ ${{ parameters.symbolPublishingAdditionalParameters }}
+
+ - job: publish_assets
+ displayName: Publish Assets
+ dependsOn: setupMaestroVars
+ variables:
+ - group: DotNet-Blob-Feed
+ - group: AzureDevOps-Artifact-Feeds-Pats
+ - name: BARBuildId
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.BARBuildId'] ]
+ - name: IsStableBuild
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.IsStableBuild'] ]
+ condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.InitialChannels'], format('[{0}]', variables.PublicRelease_31_Channel_Id))
+ pool:
+ vmImage: 'windows-2019'
+ steps:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Package Artifacts
+ inputs:
+ buildType: current
+ artifactName: PackageArtifacts
+
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Blob Artifacts
+ inputs:
+ buildType: current
+ artifactName: BlobArtifacts
+
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Asset Manifests
+ inputs:
+ buildType: current
+ artifactName: AssetManifests
+
+ - task: NuGetToolInstaller@1
+ displayName: 'Install NuGet.exe'
+
+ # This is necessary whenever we want to publish/restore to an AzDO private feed
+ - task: NuGetAuthenticate@0
+ displayName: 'Authenticate to AzDO Feeds'
+
+ - task: PowerShell@2
+ displayName: Enable cross-org publishing
+ inputs:
+ filePath: eng\common\enable-cross-org-publishing.ps1
+ arguments: -token $(dn-bot-dnceng-artifact-feeds-rw)
+
+ - task: PowerShell@2
+ displayName: Publish Assets
+ inputs:
+ filePath: eng\common\sdk-task.ps1
+ arguments: -task PublishArtifactsInManifest -restore -msbuildEngine dotnet
+ /p:ArtifactsCategory=$(_DotNetArtifactsCategory)
+ /p:IsStableBuild=$(IsStableBuild)
+ /p:IsInternalBuild=$(IsInternalBuild)
+ /p:RepositoryName=$(Build.Repository.Name)
+ /p:CommitSha=$(Build.SourceVersion)
+ /p:NugetPath=$(NuGetExeToolPath)
+ /p:AzdoTargetFeedPAT='$(dn-bot-dnceng-universal-packages-rw)'
+ /p:AzureStorageTargetFeedPAT='$(dotnetfeed-storage-access-key-1)'
+ /p:BARBuildId=$(BARBuildId)
+ /p:MaestroApiEndpoint='$(MaestroApiEndPoint)'
+ /p:BuildAssetRegistryToken='$(MaestroApiAccessToken)'
+ /p:ManifestsBasePath='$(Build.ArtifactStagingDirectory)/AssetManifests/'
+ /p:BlobBasePath='$(Build.ArtifactStagingDirectory)/BlobArtifacts/'
+ /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts/'
+ /p:Configuration=Release
+ /p:PublishInstallersAndChecksums=${{ parameters.publishInstallersAndChecksums }}
+ /p:InstallersTargetStaticFeed=$(InstallersBlobFeedUrl)
+ /p:InstallersAzureAccountKey=$(dotnetcli-storage-key)
+ /p:ChecksumsTargetStaticFeed=$(ChecksumsBlobFeedUrl)
+ /p:ChecksumsAzureAccountKey=$(dotnetclichecksums-storage-key)
+ /p:PublishToAzureDevOpsNuGetFeeds=true
+ /p:AzureDevOpsStaticShippingFeed='https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1/nuget/v3/index.json'
+ /p:AzureDevOpsStaticShippingFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)'
+ /p:AzureDevOpsStaticTransportFeed='https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-transport/nuget/v3/index.json'
+ /p:AzureDevOpsStaticTransportFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)'
+ /p:AzureDevOpsStaticSymbolsFeed='https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-symbols/nuget/v3/index.json'
+ /p:AzureDevOpsStaticSymbolsFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)'
+ ${{ parameters.artifactsPublishingAdditionalParameters }}
+
+ - template: ../../steps/promote-build.yml
+ parameters:
+ ChannelId: ${{ variables.PublicRelease_31_Channel_Id }}
diff --git a/eng/common/templates/post-build/channels/netcore-tools-latest.yml b/eng/common/templates/post-build/channels/netcore-tools-latest.yml
new file mode 100644
index 000000000..9bf9626ca
--- /dev/null
+++ b/eng/common/templates/post-build/channels/netcore-tools-latest.yml
@@ -0,0 +1,132 @@
+parameters:
+ artifactsPublishingAdditionalParameters: ''
+ dependsOn:
+ - Validate
+ publishInstallersAndChecksums: false
+ symbolPublishingAdditionalParameters: ''
+
+stages:
+- stage: NetCore_Tools_Latest_Publish
+ dependsOn: ${{ parameters.dependsOn }}
+ variables:
+ - template: ../common-variables.yml
+ displayName: .NET Tools - Latest Publishing
+ jobs:
+ - template: ../setup-maestro-vars.yml
+
+ - job:
+ displayName: Symbol Publishing
+ dependsOn: setupMaestroVars
+ condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.InitialChannels'], format('[{0}]', variables.NetCore_Tools_Latest_Channel_Id))
+ variables:
+ - group: DotNet-Symbol-Server-Pats
+ pool:
+ vmImage: 'windows-2019'
+ steps:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Blob Artifacts
+ inputs:
+ artifactName: 'BlobArtifacts'
+ continueOnError: true
+
+ - task: DownloadBuildArtifacts@0
+ displayName: Download PDB Artifacts
+ inputs:
+ artifactName: 'PDBArtifacts'
+ continueOnError: true
+
+ - task: PowerShell@2
+ displayName: Publish
+ inputs:
+ filePath: eng\common\sdk-task.ps1
+ arguments: -task PublishToSymbolServers -restore -msbuildEngine dotnet
+ /p:DotNetSymbolServerTokenMsdl=$(microsoft-symbol-server-pat)
+ /p:DotNetSymbolServerTokenSymWeb=$(symweb-symbol-server-pat)
+ /p:PDBArtifactsDirectory='$(Build.ArtifactStagingDirectory)/PDBArtifacts/'
+ /p:BlobBasePath='$(Build.ArtifactStagingDirectory)/BlobArtifacts/'
+ /p:SymbolPublishingExclusionsFile='$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt'
+ /p:Configuration=Release
+ ${{ parameters.symbolPublishingAdditionalParameters }}
+
+ - job: publish_assets
+ displayName: Publish Assets
+ dependsOn: setupMaestroVars
+ variables:
+ - group: DotNet-Blob-Feed
+ - group: AzureDevOps-Artifact-Feeds-Pats
+ - name: BARBuildId
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.BARBuildId'] ]
+ - name: IsStableBuild
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.IsStableBuild'] ]
+ condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.InitialChannels'], format('[{0}]', variables.NetCore_Tools_Latest_Channel_Id))
+ pool:
+ vmImage: 'windows-2019'
+ steps:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Package Artifacts
+ inputs:
+ buildType: current
+ artifactName: PackageArtifacts
+
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Blob Artifacts
+ inputs:
+ buildType: current
+ artifactName: BlobArtifacts
+
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Asset Manifests
+ inputs:
+ buildType: current
+ artifactName: AssetManifests
+
+ - task: NuGetToolInstaller@1
+ displayName: 'Install NuGet.exe'
+
+ # This is necessary whenever we want to publish/restore to an AzDO private feed
+ - task: NuGetAuthenticate@0
+ displayName: 'Authenticate to AzDO Feeds'
+
+ - task: PowerShell@2
+ displayName: Enable cross-org publishing
+ inputs:
+ filePath: eng\common\enable-cross-org-publishing.ps1
+ arguments: -token $(dn-bot-dnceng-artifact-feeds-rw)
+
+ - task: PowerShell@2
+ displayName: Publish Assets
+ inputs:
+ filePath: eng\common\sdk-task.ps1
+ arguments: -task PublishArtifactsInManifest -restore -msbuildEngine dotnet
+ /p:ArtifactsCategory=$(_DotNetArtifactsCategory)
+ /p:IsStableBuild=$(IsStableBuild)
+ /p:IsInternalBuild=$(IsInternalBuild)
+ /p:RepositoryName=$(Build.Repository.Name)
+ /p:CommitSha=$(Build.SourceVersion)
+ /p:NugetPath=$(NuGetExeToolPath)
+ /p:AzdoTargetFeedPAT='$(dn-bot-dnceng-universal-packages-rw)'
+ /p:AzureStorageTargetFeedPAT='$(dotnetfeed-storage-access-key-1)'
+ /p:BARBuildId=$(BARBuildId)
+ /p:MaestroApiEndpoint='$(MaestroApiEndPoint)'
+ /p:BuildAssetRegistryToken='$(MaestroApiAccessToken)'
+ /p:ManifestsBasePath='$(Build.ArtifactStagingDirectory)/AssetManifests/'
+ /p:BlobBasePath='$(Build.ArtifactStagingDirectory)/BlobArtifacts/'
+ /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts/'
+ /p:Configuration=Release
+ /p:PublishInstallersAndChecksums=${{ parameters.publishInstallersAndChecksums }}
+ /p:InstallersTargetStaticFeed=$(InstallersBlobFeedUrl)
+ /p:InstallersAzureAccountKey=$(dotnetcli-storage-key)
+ /p:ChecksumsTargetStaticFeed=$(ChecksumsBlobFeedUrl)
+ /p:ChecksumsAzureAccountKey=$(dotnetclichecksums-storage-key)
+ /p:PublishToAzureDevOpsNuGetFeeds=true
+ /p:AzureDevOpsStaticShippingFeed='https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json'
+ /p:AzureDevOpsStaticShippingFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)'
+ /p:AzureDevOpsStaticTransportFeed='https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json'
+ /p:AzureDevOpsStaticTransportFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)'
+ /p:AzureDevOpsStaticSymbolsFeed='https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-symbols/nuget/v3/index.json'
+ /p:AzureDevOpsStaticSymbolsFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)'
+ ${{ parameters.artifactsPublishingAdditionalParameters }}
+
+ - template: ../../steps/promote-build.yml
+ parameters:
+ ChannelId: ${{ variables.NetCore_Tools_Latest_Channel_Id }}
\ No newline at end of file
diff --git a/eng/common/templates/post-build/channels/public-validation-release.yml b/eng/common/templates/post-build/channels/public-validation-release.yml
new file mode 100644
index 000000000..5c8e91cce
--- /dev/null
+++ b/eng/common/templates/post-build/channels/public-validation-release.yml
@@ -0,0 +1,97 @@
+parameters:
+ artifactsPublishingAdditionalParameters: ''
+ dependsOn:
+ - Validate
+ publishInstallersAndChecksums: false
+
+stages:
+- stage: PVR_Publish
+ dependsOn: ${{ parameters.dependsOn }}
+ variables:
+ - template: ../common-variables.yml
+ displayName: .NET Tools - Validation Publishing
+ jobs:
+ - template: ../setup-maestro-vars.yml
+
+ - job: publish_assets
+ displayName: Publish Assets
+ dependsOn: setupMaestroVars
+ variables:
+ - group: DotNet-Blob-Feed
+ - group: AzureDevOps-Artifact-Feeds-Pats
+ - name: BARBuildId
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.BARBuildId'] ]
+ - name: IsStableBuild
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.IsStableBuild'] ]
+ condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.InitialChannels'], format('[{0}]', variables.PublicValidationRelease_30_Channel_Id))
+ pool:
+ vmImage: 'windows-2019'
+ steps:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Package Artifacts
+ inputs:
+ buildType: current
+ artifactName: PackageArtifacts
+
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Blob Artifacts
+ inputs:
+ buildType: current
+ artifactName: BlobArtifacts
+
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Asset Manifests
+ inputs:
+ buildType: current
+ artifactName: AssetManifests
+
+ - task: NuGetToolInstaller@1
+ displayName: 'Install NuGet.exe'
+
+ # This is necessary whenever we want to publish/restore to an AzDO private feed
+ - task: NuGetAuthenticate@0
+ displayName: 'Authenticate to AzDO Feeds'
+
+ - task: PowerShell@2
+ displayName: Enable cross-org publishing
+ inputs:
+ filePath: eng\common\enable-cross-org-publishing.ps1
+ arguments: -token $(dn-bot-dnceng-artifact-feeds-rw)
+
+ - task: PowerShell@2
+ displayName: Publish Assets
+ inputs:
+ filePath: eng\common\sdk-task.ps1
+ arguments: -task PublishArtifactsInManifest -restore -msbuildEngine dotnet
+ /p:ArtifactsCategory=$(_DotNetValidationArtifactsCategory)
+ /p:IsStableBuild=$(IsStableBuild)
+ /p:IsInternalBuild=$(IsInternalBuild)
+ /p:RepositoryName=$(Build.Repository.Name)
+ /p:CommitSha=$(Build.SourceVersion)
+ /p:NugetPath=$(NuGetExeToolPath)
+ /p:AzdoTargetFeedPAT='$(dn-bot-dnceng-universal-packages-rw)'
+ /p:AzureStorageTargetFeedPAT='$(dotnetfeed-storage-access-key-1)'
+ /p:BARBuildId=$(BARBuildId)
+ /p:MaestroApiEndpoint='$(MaestroApiEndPoint)'
+ /p:BuildAssetRegistryToken='$(MaestroApiAccessToken)'
+ /p:ManifestsBasePath='$(Build.ArtifactStagingDirectory)/AssetManifests/'
+ /p:BlobBasePath='$(Build.ArtifactStagingDirectory)/BlobArtifacts/'
+ /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts/'
+ /p:Configuration=Release
+ /p:PublishInstallersAndChecksums=${{ parameters.publishInstallersAndChecksums }}
+ /p:InstallersTargetStaticFeed=$(InstallersBlobFeedUrl)
+ /p:InstallersAzureAccountKey=$(dotnetcli-storage-key)
+ /p:ChecksumsTargetStaticFeed=$(ChecksumsBlobFeedUrl)
+ /p:ChecksumsAzureAccountKey=$(dotnetclichecksums-storage-key)
+ /p:PublishToAzureDevOpsNuGetFeeds=true
+ /p:AzureDevOpsStaticShippingFeed='https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json'
+ /p:AzureDevOpsStaticShippingFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)'
+ /p:AzureDevOpsStaticTransportFeed='https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json'
+ /p:AzureDevOpsStaticTransportFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)'
+ /p:AzureDevOpsStaticSymbolsFeed='https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-symbols/nuget/v3/index.json'
+ /p:AzureDevOpsStaticSymbolsFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)'
+ ${{ parameters.artifactsPublishingAdditionalParameters }}
+
+ - template: ../../steps/promote-build.yml
+ parameters:
+ ChannelId: ${{ variables.PublicValidationRelease_30_Channel_Id }}
diff --git a/eng/common/templates/post-build/common-variables.yml b/eng/common/templates/post-build/common-variables.yml
new file mode 100644
index 000000000..9ccc08b2c
--- /dev/null
+++ b/eng/common/templates/post-build/common-variables.yml
@@ -0,0 +1,70 @@
+variables:
+ - group: Publish-Build-Assets
+ - group: DotNet-DotNetCli-Storage
+ - group: DotNet-MSRC-Storage
+
+ # .NET Core 3.1 Dev
+ - name: PublicDevRelease_31_Channel_Id
+ value: 128
+
+ # .NET Core 5 Dev
+ - name: NetCore_5_Dev_Channel_Id
+ value: 131
+
+ # .NET Tools - Validation
+ - name: PublicValidationRelease_30_Channel_Id
+ value: 9
+
+ # .NET Tools - Latest
+ - name: NetCore_Tools_Latest_Channel_Id
+ value: 2
+
+ # .NET Core 3.0 Internal Servicing
+ - name: InternalServicing_30_Channel_Id
+ value: 184
+
+ # .NET Core 3.0 Release
+ - name: PublicRelease_30_Channel_Id
+ value: 19
+
+ # .NET Core 3.1 Release
+ - name: PublicRelease_31_Channel_Id
+ value: 129
+
+ # Whether the build is internal or not
+ - name: IsInternalBuild
+ value: ${{ and(ne(variables['System.TeamProject'], 'public'), contains(variables['Build.SourceBranch'], 'internal')) }}
+
+ # Default Maestro++ API Endpoint and API Version
+ - name: MaestroApiEndPoint
+ value: "https://maestro-prod.westus2.cloudapp.azure.com"
+ - name: MaestroApiAccessToken
+ value: $(MaestroAccessToken)
+ - name: MaestroApiVersion
+ value: "2019-01-16"
+
+ - name: SourceLinkCLIVersion
+ value: 3.0.0
+ - name: SymbolToolVersion
+ value: 1.0.1
+
+ # Feed Configurations
+ # These should include the suffix "/index.json"
+
+ # Default locations for Installers and checksums
+ # Public Locations
+ - name: ChecksumsBlobFeedUrl
+ value: https://dotnetclichecksums.blob.core.windows.net/dotnet/index.json
+ - name: InstallersBlobFeedUrl
+ value: https://dotnetcli.blob.core.windows.net/dotnet/index.json
+
+ # Private Locations
+ - name: InternalChecksumsBlobFeedUrl
+ value: https://dotnetclichecksumsmsrc.blob.core.windows.net/dotnet/index.json
+ - name: InternalChecksumsBlobFeedKey
+ value: $(dotnetclichecksumsmsrc-storage-key)
+
+ - name: InternalInstallersBlobFeedUrl
+ value: https://dotnetclimsrc.blob.core.windows.net/dotnet/index.json
+ - name: InternalInstallersBlobFeedKey
+ value: $(dotnetclimsrc-access-key)
diff --git a/eng/common/templates/post-build/darc-gather-drop.yml b/eng/common/templates/post-build/darc-gather-drop.yml
new file mode 100644
index 000000000..3268ccaa5
--- /dev/null
+++ b/eng/common/templates/post-build/darc-gather-drop.yml
@@ -0,0 +1,23 @@
+parameters:
+ ChannelId: 0
+
+jobs:
+- job: gatherDrop
+ displayName: Gather Drop
+ dependsOn: setupMaestroVars
+ condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.InitialChannels'], format('[{0}]', ${{ parameters.ChannelId }}))
+ variables:
+ - name: BARBuildId
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.BARBuildId'] ]
+ pool:
+ vmImage: 'windows-2019'
+ steps:
+ - task: PowerShell@2
+ displayName: Darc gather-drop
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/darc-gather-drop.ps1
+ arguments: -BarBuildId $(BARBuildId)
+ -DropLocation $(Agent.BuildDirectory)/Temp/Drop/
+ -MaestroApiAccessToken $(MaestroApiAccessToken)
+ -MaestroApiEndPoint $(MaestroApiEndPoint)
+ -MaestroApiVersion $(MaestroApiVersion)
diff --git a/eng/common/templates/post-build/post-build.yml b/eng/common/templates/post-build/post-build.yml
new file mode 100644
index 000000000..8d7a762a1
--- /dev/null
+++ b/eng/common/templates/post-build/post-build.yml
@@ -0,0 +1,146 @@
+parameters:
+ enableSourceLinkValidation: false
+ enableSigningValidation: true
+ enableSymbolValidation: false
+ enableNugetValidation: true
+ publishInstallersAndChecksums: false
+ SDLValidationParameters:
+ enable: false
+ continueOnError: false
+ params: ''
+
+ # These parameters let the user customize the call to sdk-task.ps1 for publishing
+ # symbols & general artifacts as well as for signing validation
+ symbolPublishingAdditionalParameters: ''
+ artifactsPublishingAdditionalParameters: ''
+ signingValidationAdditionalParameters: ''
+
+ # Which stages should finish execution before post-build stages start
+ validateDependsOn:
+ - build
+ publishDependsOn:
+ - Validate
+
+stages:
+- stage: Validate
+ dependsOn: ${{ parameters.validateDependsOn }}
+ displayName: Validate
+ jobs:
+ - ${{ if eq(parameters.enableNugetValidation, 'true') }}:
+ - job:
+ displayName: NuGet Validation
+ pool:
+ vmImage: 'windows-2019'
+ steps:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Package Artifacts
+ inputs:
+ buildType: current
+ artifactName: PackageArtifacts
+
+ - task: PowerShell@2
+ displayName: Validate
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/nuget-validation.ps1
+ arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/
+ -ToolDestinationPath $(Agent.BuildDirectory)/Extract/
+
+ - ${{ if eq(parameters.enableSigningValidation, 'true') }}:
+ - job:
+ displayName: Signing Validation
+ pool:
+ vmImage: 'windows-2019'
+ steps:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Package Artifacts
+ inputs:
+ buildType: current
+ artifactName: PackageArtifacts
+
+ - task: PowerShell@2
+ displayName: Validate
+ inputs:
+ filePath: eng\common\sdk-task.ps1
+ arguments: -task SigningValidation -restore -msbuildEngine dotnet
+ /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts'
+ /p:SignCheckExclusionsFile='$(Build.SourcesDirectory)/eng/SignCheckExclusionsFile.txt'
+ /p:Configuration=Release
+ ${{ parameters.signingValidationAdditionalParameters }}
+
+ - ${{ if eq(parameters.enableSourceLinkValidation, 'true') }}:
+ - job:
+ displayName: SourceLink Validation
+ variables:
+ - template: common-variables.yml
+ pool:
+ vmImage: 'windows-2019'
+ steps:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Blob Artifacts
+ inputs:
+ buildType: current
+ artifactName: BlobArtifacts
+
+ - task: PowerShell@2
+ displayName: Validate
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/sourcelink-validation.ps1
+ arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/
+ -ExtractPath $(Agent.BuildDirectory)/Extract/
+ -GHRepoName $(Build.Repository.Name)
+ -GHCommit $(Build.SourceVersion)
+ -SourcelinkCliVersion $(SourceLinkCLIVersion)
+ continueOnError: true
+
+ - ${{ if eq(parameters.SDLValidationParameters.enable, 'true') }}:
+ - template: /eng/common/templates/job/execute-sdl.yml
+ parameters:
+ additionalParameters: ${{ parameters.SDLValidationParameters.params }}
+ continueOnError: ${{ parameters.SDLValidationParameters.continueOnError }}
+
+- template: /eng/common/templates/post-build/channels/netcore-dev-5.yml
+ parameters:
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+
+- template: /eng/common/templates/post-build/channels/netcore-dev-31.yml
+ parameters:
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+
+- template: /eng/common/templates/post-build/channels/netcore-tools-latest.yml
+ parameters:
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+
+- template: /eng/common/templates/post-build/channels/public-validation-release.yml
+ parameters:
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+
+- template: /eng/common/templates/post-build/channels/netcore-release-30.yml
+ parameters:
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+
+- template: /eng/common/templates/post-build/channels/netcore-release-31.yml
+ parameters:
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+
+- template: /eng/common/templates/post-build/channels/netcore-internal-30.yml
+ parameters:
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
diff --git a/eng/common/templates/post-build/promote-build.yml b/eng/common/templates/post-build/promote-build.yml
new file mode 100644
index 000000000..6b479c3b8
--- /dev/null
+++ b/eng/common/templates/post-build/promote-build.yml
@@ -0,0 +1,25 @@
+parameters:
+ ChannelId: 0
+
+jobs:
+- job:
+ displayName: Promote Build
+ dependsOn: setupMaestroVars
+ condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.InitialChannels'], format('[{0}]', ${{ parameters.ChannelId }}))
+ variables:
+ - name: BARBuildId
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.BARBuildId'] ]
+ - name: ChannelId
+ value: ${{ parameters.ChannelId }}
+ pool:
+ vmImage: 'windows-2019'
+ steps:
+ - task: PowerShell@2
+ displayName: Add Build to Channel
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/promote-build.ps1
+ arguments: -BuildId $(BARBuildId)
+ -ChannelId $(ChannelId)
+ -MaestroApiAccessToken $(MaestroApiAccessToken)
+ -MaestroApiEndPoint $(MaestroApiEndPoint)
+ -MaestroApiVersion $(MaestroApiVersion)
diff --git a/eng/common/templates/post-build/setup-maestro-vars.yml b/eng/common/templates/post-build/setup-maestro-vars.yml
new file mode 100644
index 000000000..56242b068
--- /dev/null
+++ b/eng/common/templates/post-build/setup-maestro-vars.yml
@@ -0,0 +1,18 @@
+jobs:
+- job: setupMaestroVars
+ displayName: Setup Maestro Vars
+ pool:
+ vmImage: 'windows-2019'
+ steps:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Release Configs
+ inputs:
+ buildType: current
+ artifactName: ReleaseConfigs
+
+ - task: PowerShell@2
+ name: setReleaseVars
+ displayName: Set Release Configs Vars
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/setup-maestro-vars.ps1
+ arguments: -ReleaseConfigsPath '$(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt'
diff --git a/eng/common/templates/post-build/trigger-subscription.yml b/eng/common/templates/post-build/trigger-subscription.yml
new file mode 100644
index 000000000..da669030d
--- /dev/null
+++ b/eng/common/templates/post-build/trigger-subscription.yml
@@ -0,0 +1,13 @@
+parameters:
+ ChannelId: 0
+
+steps:
+- task: PowerShell@2
+ displayName: Triggering subscriptions
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/trigger-subscriptions.ps1
+ arguments: -SourceRepo $(Build.Repository.Uri)
+ -ChannelId ${{ parameters.ChannelId }}
+      -MaestroApiAccessToken $(MaestroApiAccessToken)
+ -MaestroApiEndPoint $(MaestroApiEndPoint)
+ -MaestroApiVersion $(MaestroApiVersion)
diff --git a/eng/common/templates/steps/build-reason.yml b/eng/common/templates/steps/build-reason.yml
new file mode 100644
index 000000000..eba58109b
--- /dev/null
+++ b/eng/common/templates/steps/build-reason.yml
@@ -0,0 +1,12 @@
+# build-reason.yml
+# Description: runs the given steps only when Build.Reason matches the 'conditions' parameter,
+# a comma-separated string of build reasons. Prefix the string with 'not' to invert the match.
+parameters:
+ conditions: ''
+ steps: []
+
+steps:
+ - ${{ if and( not(startsWith(parameters.conditions, 'not')), contains(parameters.conditions, variables['build.reason'])) }}:
+ - ${{ parameters.steps }}
+ - ${{ if and( startsWith(parameters.conditions, 'not'), not(contains(parameters.conditions, variables['build.reason']))) }}:
+ - ${{ parameters.steps }}
diff --git a/eng/common/templates/steps/perf-send-to-helix.yml b/eng/common/templates/steps/perf-send-to-helix.yml
new file mode 100644
index 000000000..b3ea9acf1
--- /dev/null
+++ b/eng/common/templates/steps/perf-send-to-helix.yml
@@ -0,0 +1,66 @@
+# Please remember to update the documentation if you make changes to these parameters!
+parameters:
+ HelixSource: 'pr/default' # required -- sources must start with pr/, official/, prodcon/, or agent/
+ HelixType: 'tests/default/' # required -- Helix telemetry which identifies what type of data this is; should include "test" for clarity and must end in '/'
+ HelixBuild: $(Build.BuildNumber) # required -- the build number Helix will use to identify this -- automatically set to the AzDO build number
+ HelixTargetQueues: '' # required -- semicolon delimited list of Helix queues to test on; see https://helix.dot.net/ for a list of queues
+ HelixAccessToken: '' # required -- access token to make Helix API requests; should be provided by the appropriate variable group
+ HelixPreCommands: '' # optional -- commands to run before Helix work item execution
+ HelixPostCommands: '' # optional -- commands to run after Helix work item execution
+ WorkItemDirectory: '' # optional -- a payload directory to zip up and send to Helix; requires WorkItemCommand; incompatible with XUnitProjects
+ CorrelationPayloadDirectory: '' # optional -- a directory to zip up and send to Helix as a correlation payload
+ IncludeDotNetCli: false # optional -- true will download a version of the .NET CLI onto the Helix machine as a correlation payload; requires DotNetCliPackageType and DotNetCliVersion
+ DotNetCliPackageType: '' # optional -- either 'sdk' or 'runtime'; determines whether the sdk or runtime will be sent to Helix; see https://raw.githubusercontent.com/dotnet/core/master/release-notes/releases.json
+ DotNetCliVersion: '' # optional -- version of the CLI to send to Helix; based on this: https://raw.githubusercontent.com/dotnet/core/master/release-notes/releases.json
+ EnableXUnitReporter: false # optional -- true enables XUnit result reporting to Mission Control
+ WaitForWorkItemCompletion: true # optional -- true will make the task wait until work items have been completed and fail the build if work items fail. False is "fire and forget."
+ Creator: '' # optional -- if the build is external, use this to specify who is sending the job
+ DisplayNamePrefix: 'Send job to Helix' # optional -- rename the beginning of the displayName of the steps in AzDO
+ condition: succeeded() # optional -- condition for step to execute; defaults to succeeded()
+ continueOnError: false # optional -- determines whether to continue the build if the step errors; defaults to false
+
+steps:
+ - powershell: $(Build.SourcesDirectory)\eng\common\msbuild.ps1 $(Build.SourcesDirectory)\eng\common\performance\perfhelixpublish.proj /restore /t:Test /bl:$(Build.SourcesDirectory)\artifacts\log\$env:BuildConfig\SendToHelix.binlog
+ displayName: ${{ parameters.DisplayNamePrefix }} (Windows)
+ env:
+ BuildConfig: $(_BuildConfig)
+ HelixSource: ${{ parameters.HelixSource }}
+ HelixType: ${{ parameters.HelixType }}
+ HelixBuild: ${{ parameters.HelixBuild }}
+ HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
+ HelixAccessToken: ${{ parameters.HelixAccessToken }}
+ HelixPreCommands: ${{ parameters.HelixPreCommands }}
+ HelixPostCommands: ${{ parameters.HelixPostCommands }}
+ WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
+ CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
+ IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
+ DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
+ DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
+ EnableXUnitReporter: ${{ parameters.EnableXUnitReporter }}
+ WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
+ Creator: ${{ parameters.Creator }}
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+ condition: and(${{ parameters.condition }}, eq(variables['Agent.Os'], 'Windows_NT'))
+ continueOnError: ${{ parameters.continueOnError }}
+ - script: $BUILD_SOURCESDIRECTORY/eng/common/msbuild.sh $BUILD_SOURCESDIRECTORY/eng/common/performance/perfhelixpublish.proj /restore /t:Test /bl:$BUILD_SOURCESDIRECTORY/artifacts/log/$BuildConfig/SendToHelix.binlog
+ displayName: ${{ parameters.DisplayNamePrefix }} (Unix)
+ env:
+ BuildConfig: $(_BuildConfig)
+ HelixSource: ${{ parameters.HelixSource }}
+ HelixType: ${{ parameters.HelixType }}
+ HelixBuild: ${{ parameters.HelixBuild }}
+ HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
+ HelixAccessToken: ${{ parameters.HelixAccessToken }}
+ HelixPreCommands: ${{ parameters.HelixPreCommands }}
+ HelixPostCommands: ${{ parameters.HelixPostCommands }}
+ WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
+ CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
+ IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
+ DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
+ DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
+ EnableXUnitReporter: ${{ parameters.EnableXUnitReporter }}
+ WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
+ Creator: ${{ parameters.Creator }}
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+ condition: and(${{ parameters.condition }}, ne(variables['Agent.Os'], 'Windows_NT'))
+ continueOnError: ${{ parameters.continueOnError }}
diff --git a/eng/common/templates/steps/promote-build.yml b/eng/common/templates/steps/promote-build.yml
new file mode 100644
index 000000000..b90404435
--- /dev/null
+++ b/eng/common/templates/steps/promote-build.yml
@@ -0,0 +1,13 @@
+parameters:
+ ChannelId: 0
+
+steps:
+- task: PowerShell@2
+ displayName: Add Build to Channel
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/promote-build.ps1
+ arguments: -BuildId $(BARBuildId)
+ -ChannelId ${{ parameters.ChannelId }}
+ -MaestroApiAccessToken $(MaestroApiAccessToken)
+ -MaestroApiEndPoint $(MaestroApiEndPoint)
+ -MaestroApiVersion $(MaestroApiVersion)
diff --git a/eng/common/templates/steps/run-on-unix.yml b/eng/common/templates/steps/run-on-unix.yml
new file mode 100644
index 000000000..e1733814f
--- /dev/null
+++ b/eng/common/templates/steps/run-on-unix.yml
@@ -0,0 +1,7 @@
+parameters:
+ agentOs: ''
+ steps: []
+
+steps:
+- ${{ if ne(parameters.agentOs, 'Windows_NT') }}:
+ - ${{ parameters.steps }}
diff --git a/eng/common/templates/steps/run-on-windows.yml b/eng/common/templates/steps/run-on-windows.yml
new file mode 100644
index 000000000..73e7e9c27
--- /dev/null
+++ b/eng/common/templates/steps/run-on-windows.yml
@@ -0,0 +1,7 @@
+parameters:
+ agentOs: ''
+ steps: []
+
+steps:
+- ${{ if eq(parameters.agentOs, 'Windows_NT') }}:
+ - ${{ parameters.steps }}
diff --git a/eng/common/templates/steps/run-script-ifequalelse.yml b/eng/common/templates/steps/run-script-ifequalelse.yml
new file mode 100644
index 000000000..3d1242f55
--- /dev/null
+++ b/eng/common/templates/steps/run-script-ifequalelse.yml
@@ -0,0 +1,33 @@
+parameters:
+  # if parameter1 equals parameter2, run the 'ifScript' command, else run the 'elseScript' command
+ parameter1: ''
+ parameter2: ''
+ ifScript: ''
+ elseScript: ''
+
+ # name of script step
+ name: Script
+
+ # display name of script step
+ displayName: If-Equal-Else Script
+
+ # environment
+ env: {}
+
+ # conditional expression for step execution
+ condition: ''
+
+steps:
+- ${{ if and(ne(parameters.ifScript, ''), eq(parameters.parameter1, parameters.parameter2)) }}:
+ - script: ${{ parameters.ifScript }}
+ name: ${{ parameters.name }}
+ displayName: ${{ parameters.displayName }}
+ env: ${{ parameters.env }}
+ condition: ${{ parameters.condition }}
+
+- ${{ if and(ne(parameters.elseScript, ''), ne(parameters.parameter1, parameters.parameter2)) }}:
+ - script: ${{ parameters.elseScript }}
+ name: ${{ parameters.name }}
+ displayName: ${{ parameters.displayName }}
+ env: ${{ parameters.env }}
+ condition: ${{ parameters.condition }}
\ No newline at end of file
diff --git a/eng/common/templates/steps/send-to-helix.yml b/eng/common/templates/steps/send-to-helix.yml
new file mode 100644
index 000000000..05df886f5
--- /dev/null
+++ b/eng/common/templates/steps/send-to-helix.yml
@@ -0,0 +1,91 @@
+# Please remember to update the documentation if you make changes to these parameters!
+parameters:
+ HelixSource: 'pr/default' # required -- sources must start with pr/, official/, prodcon/, or agent/
+ HelixType: 'tests/default/' # required -- Helix telemetry which identifies what type of data this is; should include "test" for clarity and must end in '/'
+ HelixBuild: $(Build.BuildNumber) # required -- the build number Helix will use to identify this -- automatically set to the AzDO build number
+ HelixTargetQueues: '' # required -- semicolon delimited list of Helix queues to test on; see https://helix.dot.net/ for a list of queues
+ HelixAccessToken: '' # required -- access token to make Helix API requests; should be provided by the appropriate variable group
+ HelixConfiguration: '' # optional -- additional property attached to a job
+ HelixPreCommands: '' # optional -- commands to run before Helix work item execution
+ HelixPostCommands: '' # optional -- commands to run after Helix work item execution
+ WorkItemDirectory: '' # optional -- a payload directory to zip up and send to Helix; requires WorkItemCommand; incompatible with XUnitProjects
+ WorkItemCommand: '' # optional -- a command to execute on the payload; requires WorkItemDirectory; incompatible with XUnitProjects
+ WorkItemTimeout: '' # optional -- a timeout in seconds for the work item command; requires WorkItemDirectory; incompatible with XUnitProjects
+ CorrelationPayloadDirectory: '' # optional -- a directory to zip up and send to Helix as a correlation payload
+ XUnitProjects: '' # optional -- semicolon delimited list of XUnitProjects to parse and send to Helix; requires XUnitRuntimeTargetFramework, XUnitPublishTargetFramework, XUnitRunnerVersion, and IncludeDotNetCli=true
+ XUnitWorkItemTimeout: '' # optional -- the workitem timeout in seconds for all workitems created from the xUnit projects specified by XUnitProjects
+ XUnitPublishTargetFramework: '' # optional -- framework to use to publish your xUnit projects
+ XUnitRuntimeTargetFramework: '' # optional -- framework to use for the xUnit console runner
+ XUnitRunnerVersion: '' # optional -- version of the xUnit nuget package you wish to use on Helix; required for XUnitProjects
+ IncludeDotNetCli: false # optional -- true will download a version of the .NET CLI onto the Helix machine as a correlation payload; requires DotNetCliPackageType and DotNetCliVersion
+ DotNetCliPackageType: '' # optional -- either 'sdk' or 'runtime'; determines whether the sdk or runtime will be sent to Helix; see https://raw.githubusercontent.com/dotnet/core/master/release-notes/releases.json
+ DotNetCliVersion: '' # optional -- version of the CLI to send to Helix; based on this: https://raw.githubusercontent.com/dotnet/core/master/release-notes/releases.json
+ EnableXUnitReporter: false # optional -- true enables XUnit result reporting to Mission Control
+ WaitForWorkItemCompletion: true # optional -- true will make the task wait until work items have been completed and fail the build if work items fail. False is "fire and forget."
+ IsExternal: false # [DEPRECATED] -- doesn't do anything, jobs are external if HelixAccessToken is empty and Creator is set
+ Creator: '' # optional -- if the build is external, use this to specify who is sending the job
+ DisplayNamePrefix: 'Run Tests' # optional -- rename the beginning of the displayName of the steps in AzDO
+ condition: succeeded() # optional -- condition for step to execute; defaults to succeeded()
+ continueOnError: false # optional -- determines whether to continue the build if the step errors; defaults to false
+
+steps:
+ - powershell: 'powershell "$env:BUILD_SOURCESDIRECTORY\eng\common\msbuild.ps1 $env:BUILD_SOURCESDIRECTORY\eng\common\helixpublish.proj /restore /t:Test /bl:$env:BUILD_SOURCESDIRECTORY\artifacts\log\$env:BuildConfig\SendToHelix.binlog"'
+ displayName: ${{ parameters.DisplayNamePrefix }} (Windows)
+ env:
+ BuildConfig: $(_BuildConfig)
+ HelixSource: ${{ parameters.HelixSource }}
+ HelixType: ${{ parameters.HelixType }}
+ HelixBuild: ${{ parameters.HelixBuild }}
+ HelixConfiguration: ${{ parameters.HelixConfiguration }}
+ HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
+ HelixAccessToken: ${{ parameters.HelixAccessToken }}
+ HelixPreCommands: ${{ parameters.HelixPreCommands }}
+ HelixPostCommands: ${{ parameters.HelixPostCommands }}
+ WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
+ WorkItemCommand: ${{ parameters.WorkItemCommand }}
+ WorkItemTimeout: ${{ parameters.WorkItemTimeout }}
+ CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
+ XUnitProjects: ${{ parameters.XUnitProjects }}
+ XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }}
+ XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }}
+ XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }}
+ XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }}
+ IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
+ DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
+ DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
+ EnableXUnitReporter: ${{ parameters.EnableXUnitReporter }}
+ WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
+ Creator: ${{ parameters.Creator }}
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+ condition: and(${{ parameters.condition }}, eq(variables['Agent.Os'], 'Windows_NT'))
+ continueOnError: ${{ parameters.continueOnError }}
+ - script: $BUILD_SOURCESDIRECTORY/eng/common/msbuild.sh $BUILD_SOURCESDIRECTORY/eng/common/helixpublish.proj /restore /t:Test /bl:$BUILD_SOURCESDIRECTORY/artifacts/log/$BuildConfig/SendToHelix.binlog
+ displayName: ${{ parameters.DisplayNamePrefix }} (Unix)
+ env:
+ BuildConfig: $(_BuildConfig)
+ HelixSource: ${{ parameters.HelixSource }}
+ HelixType: ${{ parameters.HelixType }}
+ HelixBuild: ${{ parameters.HelixBuild }}
+ HelixConfiguration: ${{ parameters.HelixConfiguration }}
+ HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
+ HelixAccessToken: ${{ parameters.HelixAccessToken }}
+ HelixPreCommands: ${{ parameters.HelixPreCommands }}
+ HelixPostCommands: ${{ parameters.HelixPostCommands }}
+ WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
+ WorkItemCommand: ${{ parameters.WorkItemCommand }}
+ WorkItemTimeout: ${{ parameters.WorkItemTimeout }}
+ CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
+ XUnitProjects: ${{ parameters.XUnitProjects }}
+ XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }}
+ XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }}
+ XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }}
+ XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }}
+ IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
+ DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
+ DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
+ EnableXUnitReporter: ${{ parameters.EnableXUnitReporter }}
+ WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
+ Creator: ${{ parameters.Creator }}
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+ condition: and(${{ parameters.condition }}, ne(variables['Agent.Os'], 'Windows_NT'))
+ continueOnError: ${{ parameters.continueOnError }}
diff --git a/eng/common/templates/steps/telemetry-end.yml b/eng/common/templates/steps/telemetry-end.yml
new file mode 100644
index 000000000..fadc04ca1
--- /dev/null
+++ b/eng/common/templates/steps/telemetry-end.yml
@@ -0,0 +1,102 @@
+parameters:
+ maxRetries: 5
+ retryDelay: 10 # in seconds
+
+steps:
+- bash: |
+ if [ "$AGENT_JOBSTATUS" = "Succeeded" ] || [ "$AGENT_JOBSTATUS" = "PartiallySucceeded" ]; then
+ errorCount=0
+ else
+ errorCount=1
+ fi
+ warningCount=0
+
+ curlStatus=1
+ retryCount=0
+ # retry loop to harden against spotty telemetry connections
+ # we don't retry successes and 4xx client errors
+ until [[ $curlStatus -eq 0 || ( $curlStatus -ge 400 && $curlStatus -le 499 ) || $retryCount -ge $MaxRetries ]]
+ do
+ if [ $retryCount -gt 0 ]; then
+ echo "Failed to send telemetry to Helix; waiting $RetryDelay seconds before retrying..."
+ sleep $RetryDelay
+ fi
+
+ # create a temporary file for curl output
+ res=`mktemp`
+
+ curlResult=`
+ curl --verbose --output $res --write-out "%{http_code}"\
+ -H 'Content-Type: application/json' \
+ -H "X-Helix-Job-Token: $Helix_JobToken" \
+ -H 'Content-Length: 0' \
+ -X POST -G "https://helix.dot.net/api/2018-03-14/telemetry/job/build/$Helix_WorkItemId/finish" \
+ --data-urlencode "errorCount=$errorCount" \
+ --data-urlencode "warningCount=$warningCount"`
+ curlStatus=$?
+
+ if [ $curlStatus -eq 0 ]; then
+ if [ $curlResult -gt 299 ] || [ $curlResult -lt 200 ]; then
+ curlStatus=$curlResult
+ fi
+ fi
+
+ let retryCount++
+ done
+
+ if [ $curlStatus -ne 0 ]; then
+ echo "Failed to Send Build Finish information after $retryCount retries"
+ vstsLogOutput="vso[task.logissue type=error;sourcepath=templates/steps/telemetry-end.yml;code=1;]Failed to Send Build Finish information: $curlStatus"
+ echo "##$vstsLogOutput"
+ exit 1
+ fi
+ displayName: Send Unix Build End Telemetry
+ env:
+ # defined via VSTS variables in start-job.sh
+ Helix_JobToken: $(Helix_JobToken)
+ Helix_WorkItemId: $(Helix_WorkItemId)
+ MaxRetries: ${{ parameters.maxRetries }}
+ RetryDelay: ${{ parameters.retryDelay }}
+ condition: and(always(), ne(variables['Agent.Os'], 'Windows_NT'))
+- powershell: |
+ if (($env:Agent_JobStatus -eq 'Succeeded') -or ($env:Agent_JobStatus -eq 'PartiallySucceeded')) {
+ $ErrorCount = 0
+ } else {
+ $ErrorCount = 1
+ }
+ $WarningCount = 0
+
+ # Basic retry loop to harden against server flakiness
+ $retryCount = 0
+ while ($retryCount -lt $env:MaxRetries) {
+ try {
+ Invoke-RestMethod -Uri "https://helix.dot.net/api/2018-03-14/telemetry/job/build/$env:Helix_WorkItemId/finish?errorCount=$ErrorCount&warningCount=$WarningCount" -Method Post -ContentType "application/json" -Body "" `
+ -Headers @{ 'X-Helix-Job-Token'=$env:Helix_JobToken }
+ break
+ }
+ catch {
+ $statusCode = $_.Exception.Response.StatusCode.value__
+ if ($statusCode -ge 400 -and $statusCode -le 499) {
+        Write-Host "##vso[task.logissue type=error]Failed to send telemetry to Helix (status code $statusCode); not retrying (4xx client error)"
+        Write-Host "##vso[task.logissue type=error]$($_.Exception.GetType().FullName): $($_.Exception.Message)"
+ exit 1
+ }
+ Write-Host "Failed to send telemetry to Helix (status code $statusCode); waiting $env:RetryDelay seconds before retrying..."
+ $retryCount++
+ sleep $env:RetryDelay
+ continue
+ }
+ }
+
+ if ($retryCount -ge $env:MaxRetries) {
+      Write-Host "##vso[task.logissue type=error]Failed to send telemetry to Helix after $retryCount retries."
+ exit 1
+ }
+ displayName: Send Windows Build End Telemetry
+ env:
+ # defined via VSTS variables in start-job.ps1
+ Helix_JobToken: $(Helix_JobToken)
+ Helix_WorkItemId: $(Helix_WorkItemId)
+ MaxRetries: ${{ parameters.maxRetries }}
+ RetryDelay: ${{ parameters.retryDelay }}
+ condition: and(always(),eq(variables['Agent.Os'], 'Windows_NT'))
diff --git a/eng/common/templates/steps/telemetry-start.yml b/eng/common/templates/steps/telemetry-start.yml
new file mode 100644
index 000000000..32c01ef0b
--- /dev/null
+++ b/eng/common/templates/steps/telemetry-start.yml
@@ -0,0 +1,241 @@
+parameters:
+ helixSource: 'undefined_defaulted_in_telemetry.yml'
+ helixType: 'undefined_defaulted_in_telemetry.yml'
+ buildConfig: ''
+ runAsPublic: false
+ maxRetries: 5
+ retryDelay: 10 # in seconds
+
+steps:
+- ${{ if and(eq(parameters.runAsPublic, 'false'), not(eq(variables['System.TeamProject'], 'public'))) }}:
+ - task: AzureKeyVault@1
+ inputs:
+ azureSubscription: 'HelixProd_KeyVault'
+ KeyVaultName: HelixProdKV
+ SecretsFilter: 'HelixApiAccessToken'
+ condition: always()
+- bash: |
+ # create a temporary file
+ jobInfo=`mktemp`
+
+ # write job info content to temporary file
+ cat > $jobInfo <' | Set-Content $proj
+
+ MSBuild-Core $proj $bl /t:__WriteToolsetLocation /clp:ErrorsOnly`;NoSummary /p:__ToolsetLocationOutputFile=$toolsetLocationFile
+
+ $path = Get-Content $toolsetLocationFile -TotalCount 1
+ if (!(Test-Path $path)) {
+ throw "Invalid toolset path: $path"
+ }
+
+ return $global:_ToolsetBuildProj = $path
+}
+
+function ExitWithExitCode([int] $exitCode) {
+ if ($ci -and $prepareMachine) {
+ Stop-Processes
+ }
+ exit $exitCode
+}
+
+function Stop-Processes() {
+ Write-Host "Killing running build processes..."
+ foreach ($processName in $processesToStopOnExit) {
+ Get-Process -Name $processName -ErrorAction SilentlyContinue | Stop-Process
+ }
+}
+
+#
+# Executes msbuild (or 'dotnet msbuild') with arguments passed to the function.
+# The arguments are automatically quoted.
+# Terminates the script if the build fails.
+#
+function MSBuild() {
+ if ($pipelinesLog) {
+ $buildTool = InitializeBuildTool
+
+ # Work around issues with Azure Artifacts credential provider
+ # https://github.com/dotnet/arcade/issues/3932
+ if ($ci -and $buildTool.Tool -eq "dotnet") {
+ dotnet nuget locals http-cache -c
+
+ $env:NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS = 20
+ $env:NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS = 20
+ Write-PipelineSetVariable -Name 'NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS' -Value '20'
+ Write-PipelineSetVariable -Name 'NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS' -Value '20'
+ }
+
+ $toolsetBuildProject = InitializeToolset
+ $path = Split-Path -parent $toolsetBuildProject
+ $path = Join-Path $path (Join-Path $buildTool.Framework "Microsoft.DotNet.Arcade.Sdk.dll")
+ $args += "/logger:$path"
+ }
+
+ MSBuild-Core @args
+}
+
+#
+# Executes msbuild (or 'dotnet msbuild') with arguments passed to the function.
+# The arguments are automatically quoted.
+# Terminates the script if the build fails.
+#
+function MSBuild-Core() {
+ if ($ci) {
+ if (!$binaryLog) {
+ Write-PipelineTaskError -Message "Binary log must be enabled in CI build."
+ ExitWithExitCode 1
+ }
+
+ if ($nodeReuse) {
+ Write-PipelineTaskError -Message "Node reuse must be disabled in CI build."
+ ExitWithExitCode 1
+ }
+ }
+
+ $buildTool = InitializeBuildTool
+
+ $cmdArgs = "$($buildTool.Command) /m /nologo /clp:Summary /v:$verbosity /nr:$nodeReuse /p:ContinuousIntegrationBuild=$ci"
+
+ if ($warnAsError) {
+ $cmdArgs += " /warnaserror /p:TreatWarningsAsErrors=true"
+ }
+ else {
+ $cmdArgs += " /p:TreatWarningsAsErrors=false"
+ }
+
+ foreach ($arg in $args) {
+ if ($arg -ne $null -and $arg.Trim() -ne "") {
+ $cmdArgs += " `"$arg`""
+ }
+ }
+
+ $exitCode = Exec-Process $buildTool.Path $cmdArgs
+
+ if ($exitCode -ne 0) {
+ Write-PipelineTaskError -Message "Build failed."
+
+ $buildLog = GetMSBuildBinaryLogCommandLineArgument $args
+ if ($buildLog -ne $null) {
+ Write-Host "See log: $buildLog" -ForegroundColor DarkGray
+ }
+
+ ExitWithExitCode $exitCode
+ }
+}
+
+function GetMSBuildBinaryLogCommandLineArgument($arguments) {
+ foreach ($argument in $arguments) {
+ if ($argument -ne $null) {
+ $arg = $argument.Trim()
+ if ($arg.StartsWith("/bl:", "OrdinalIgnoreCase")) {
+ return $arg.Substring("/bl:".Length)
+ }
+
+ if ($arg.StartsWith("/binaryLogger:", "OrdinalIgnoreCase")) {
+ return $arg.Substring("/binaryLogger:".Length)
+ }
+ }
+ }
+
+ return $null
+}
+
+. $PSScriptRoot\pipeline-logging-functions.ps1
+
+$RepoRoot = Resolve-Path (Join-Path $PSScriptRoot "..\..")
+$EngRoot = Resolve-Path (Join-Path $PSScriptRoot "..")
+$ArtifactsDir = Join-Path $RepoRoot "artifacts"
+$ToolsetDir = Join-Path $ArtifactsDir "toolset"
+$ToolsDir = Join-Path $RepoRoot ".tools"
+$LogDir = Join-Path (Join-Path $ArtifactsDir "log") $configuration
+$TempDir = Join-Path (Join-Path $ArtifactsDir "tmp") $configuration
+$GlobalJson = Get-Content -Raw -Path (Join-Path $RepoRoot "global.json") | ConvertFrom-Json
+# true if global.json contains a "runtimes" section
+$globalJsonHasRuntimes = if ($GlobalJson.tools.PSObject.Properties.Name -Match 'runtimes') { $true } else { $false }
+
+Create-Directory $ToolsetDir
+Create-Directory $TempDir
+Create-Directory $LogDir
+
+Write-PipelineSetVariable -Name 'Artifacts' -Value $ArtifactsDir
+Write-PipelineSetVariable -Name 'Artifacts.Toolset' -Value $ToolsetDir
+Write-PipelineSetVariable -Name 'Artifacts.Log' -Value $LogDir
+Write-PipelineSetVariable -Name 'TEMP' -Value $TempDir
+Write-PipelineSetVariable -Name 'TMP' -Value $TempDir
diff --git a/eng/common/tools.sh b/eng/common/tools.sh
new file mode 100644
index 000000000..93ee4d67e
--- /dev/null
+++ b/eng/common/tools.sh
@@ -0,0 +1,413 @@
+#!/usr/bin/env bash
+
+# Initialize variables if they aren't already defined.
+
+# CI mode - set to true on CI server for PR validation build or official build.
+ci=${ci:-false}
+
+# Set to true to use the pipelines logger which will enable Azure logging output.
+# https://github.com/Microsoft/azure-pipelines-tasks/blob/master/docs/authoring/commands.md
+# This flag is meant as a temporary opt-in for the feature while we validate it across
+# our consumers. It will be deleted in the future.
+if [[ "$ci" == true ]]; then
+ pipelines_log=${pipelines_log:-true}
+else
+ pipelines_log=${pipelines_log:-false}
+fi
+
+# Build configuration. Common values include 'Debug' and 'Release', but the repository may use other names.
+configuration=${configuration:-'Debug'}
+
+# Set to true to output binary log from msbuild. Note that emitting binary log slows down the build.
+# Binary log must be enabled on CI.
+binary_log=${binary_log:-$ci}
+
+# Turns on machine preparation/clean up code that changes the machine state (e.g. kills build processes).
+prepare_machine=${prepare_machine:-false}
+
+# True to restore toolsets and dependencies.
+restore=${restore:-true}
+
+# Adjusts msbuild verbosity level.
+verbosity=${verbosity:-'minimal'}
+
+# Set to true to reuse msbuild nodes. Recommended to not reuse on CI.
+if [[ "$ci" == true ]]; then
+ node_reuse=${node_reuse:-false}
+else
+ node_reuse=${node_reuse:-true}
+fi
+
+# Configures warning treatment in msbuild.
+warn_as_error=${warn_as_error:-true}
+
+# True to attempt using a .NET Core SDK already installed on the machine that meets the
+# requirements specified in global.json, instead of downloading one.
+use_installed_dotnet_cli=${use_installed_dotnet_cli:-true}
+
+# Enable repos to use a particular version of the online dotnet-install scripts.
+# default URL: https://dot.net/v1/dotnet-install.sh
+dotnetInstallScriptVersion=${dotnetInstallScriptVersion:-'v1'}
+
+# True to use global NuGet cache instead of restoring packages to repository-local directory.
+if [[ "$ci" == true ]]; then
+ use_global_nuget_cache=${use_global_nuget_cache:-false}
+else
+ use_global_nuget_cache=${use_global_nuget_cache:-true}
+fi
+
+# Resolve any symlinks in the given path.
+function ResolvePath {
+ local path=$1
+
+ while [[ -h $path ]]; do
+ local dir="$( cd -P "$( dirname "$path" )" && pwd )"
+ path="$(readlink "$path")"
+
+ # if $path was a relative symlink, we need to resolve it relative to the path where the
+ # symlink file was located
+ [[ $path != /* ]] && path="$dir/$path"
+ done
+
+ # return value
+ _ResolvePath="$path"
+}
+
+# ReadGlobalVersion [json key]
+function ReadGlobalVersion {
+ local key=$1
+
+ local line=`grep -m 1 "$key" "$global_json_file"`
+ local pattern="\"$key\" *: *\"(.*)\""
+
+ if [[ ! $line =~ $pattern ]]; then
+ Write-PipelineTelemetryError -category 'InitializeToolset' "Error: Cannot find \"$key\" in $global_json_file"
+ ExitWithExitCode 1
+ fi
+
+ # return value
+ _ReadGlobalVersion=${BASH_REMATCH[1]}
+}
+
+function InitializeDotNetCli {
+ if [[ -n "${_InitializeDotNetCli:-}" ]]; then
+ return
+ fi
+
+ local install=$1
+
+ # Don't resolve runtime, shared framework, or SDK from other locations to ensure build determinism
+ export DOTNET_MULTILEVEL_LOOKUP=0
+
+ # Disable first run since we want to control all package sources
+ export DOTNET_SKIP_FIRST_TIME_EXPERIENCE=1
+
+ # Disable telemetry on CI
+ if [[ $ci == true ]]; then
+ export DOTNET_CLI_TELEMETRY_OPTOUT=1
+ fi
+
+ # LTTNG is the logging infrastructure used by Core CLR. Need this variable set
+ # so it doesn't output warnings to the console.
+ export LTTNG_HOME="$HOME"
+
+ # Source Build uses DotNetCoreSdkDir variable
+ if [[ -n "${DotNetCoreSdkDir:-}" ]]; then
+ export DOTNET_INSTALL_DIR="$DotNetCoreSdkDir"
+ fi
+
+ # Find the first path on $PATH that contains the dotnet.exe
+ if [[ "$use_installed_dotnet_cli" == true && $global_json_has_runtimes == false && -z "${DOTNET_INSTALL_DIR:-}" ]]; then
+ local dotnet_path=`command -v dotnet`
+ if [[ -n "$dotnet_path" ]]; then
+ ResolvePath "$dotnet_path"
+ export DOTNET_INSTALL_DIR=`dirname "$_ResolvePath"`
+ fi
+ fi
+
+ ReadGlobalVersion "dotnet"
+ local dotnet_sdk_version=$_ReadGlobalVersion
+ local dotnet_root=""
+
+ # Use dotnet installation specified in DOTNET_INSTALL_DIR if it contains the required SDK version,
+ # otherwise install the dotnet CLI and SDK to repo local .dotnet directory to avoid potential permission issues.
+ if [[ $global_json_has_runtimes == false && -n "${DOTNET_INSTALL_DIR:-}" && -d "$DOTNET_INSTALL_DIR/sdk/$dotnet_sdk_version" ]]; then
+ dotnet_root="$DOTNET_INSTALL_DIR"
+ else
+ dotnet_root="$repo_root/.dotnet"
+
+ export DOTNET_INSTALL_DIR="$dotnet_root"
+
+ if [[ ! -d "$DOTNET_INSTALL_DIR/sdk/$dotnet_sdk_version" ]]; then
+ if [[ "$install" == true ]]; then
+ InstallDotNetSdk "$dotnet_root" "$dotnet_sdk_version"
+ else
+ Write-PipelineTelemetryError -category 'InitializeToolset' "Unable to find dotnet with SDK version '$dotnet_sdk_version'"
+ ExitWithExitCode 1
+ fi
+ fi
+ fi
+
+ # Add dotnet to PATH. This prevents any bare invocation of dotnet in custom
+ # build steps from using anything other than what we've downloaded.
+ Write-PipelinePrependPath -path "$dotnet_root"
+
+ Write-PipelineSetVariable -name "DOTNET_MULTILEVEL_LOOKUP" -value "0"
+ Write-PipelineSetVariable -name "DOTNET_SKIP_FIRST_TIME_EXPERIENCE" -value "1"
+
+ # return value
+ _InitializeDotNetCli="$dotnet_root"
+}
+
+function InstallDotNetSdk {
+ local root=$1
+ local version=$2
+ local architecture=""
+ if [[ $# == 3 ]]; then
+ architecture=$3
+ fi
+ InstallDotNet "$root" "$version" $architecture
+}
+
+function InstallDotNet {
+ local root=$1
+ local version=$2
+
+ GetDotNetInstallScript "$root"
+ local install_script=$_GetDotNetInstallScript
+
+ local archArg=''
+ if [[ -n "${3:-}" ]]; then
+ archArg="--architecture $3"
+ fi
+ local runtimeArg=''
+ if [[ -n "${4:-}" ]]; then
+ runtimeArg="--runtime $4"
+ fi
+
+ local skipNonVersionedFilesArg=""
+ if [[ "$#" -ge "5" ]]; then
+ skipNonVersionedFilesArg="--skip-non-versioned-files"
+ fi
+ bash "$install_script" --version $version --install-dir "$root" $archArg $runtimeArg $skipNonVersionedFilesArg || {
+ local exit_code=$?
+ Write-PipelineTelemetryError -category 'InitializeToolset' "Failed to install dotnet SDK (exit code '$exit_code')."
+ ExitWithExitCode $exit_code
+ }
+}
+
+function GetDotNetInstallScript {
+ local root=$1
+ local install_script="$root/dotnet-install.sh"
+ local install_script_url="https://dot.net/$dotnetInstallScriptVersion/dotnet-install.sh"
+
+ if [[ ! -a "$install_script" ]]; then
+ mkdir -p "$root"
+
+ echo "Downloading '$install_script_url'"
+
+ # Use curl if available, otherwise use wget
+ if command -v curl > /dev/null; then
+ curl "$install_script_url" -sSL --retry 10 --create-dirs -o "$install_script" || {
+ local exit_code=$?
+ Write-PipelineTelemetryError -category 'InitializeToolset' "Failed to acquire dotnet install script (exit code '$exit_code')."
+ ExitWithExitCode $exit_code
+ }
+ else
+ wget -q -O "$install_script" "$install_script_url" || {
+ local exit_code=$?
+ Write-PipelineTelemetryError -category 'InitializeToolset' "Failed to acquire dotnet install script (exit code '$exit_code')."
+ ExitWithExitCode $exit_code
+ }
+ fi
+ fi
+ # return value
+ _GetDotNetInstallScript="$install_script"
+}
+
+function InitializeBuildTool {
+ if [[ -n "${_InitializeBuildTool:-}" ]]; then
+ return
+ fi
+
+ InitializeDotNetCli $restore
+
+ # return values
+ _InitializeBuildTool="$_InitializeDotNetCli/dotnet"
+ _InitializeBuildToolCommand="msbuild"
+ _InitializeBuildToolFramework="netcoreapp2.1"
+}
+
+function GetNuGetPackageCachePath {
+ if [[ -z ${NUGET_PACKAGES:-} ]]; then
+ if [[ "$use_global_nuget_cache" == true ]]; then
+ export NUGET_PACKAGES="$HOME/.nuget/packages"
+ else
+ export NUGET_PACKAGES="$repo_root/.packages"
+ fi
+ fi
+
+ # return value
+ _GetNuGetPackageCachePath=$NUGET_PACKAGES
+}
+
+function InitializeNativeTools() {
+ if grep -Fq "native-tools" $global_json_file
+ then
+ local nativeArgs=""
+ if [[ "$ci" == true ]]; then
+ nativeArgs="--installDirectory $tools_dir"
+ fi
+ "$_script_dir/init-tools-native.sh" $nativeArgs
+ fi
+}
+
+function InitializeToolset {
+ if [[ -n "${_InitializeToolset:-}" ]]; then
+ return
+ fi
+
+ GetNuGetPackageCachePath
+
+ ReadGlobalVersion "Microsoft.DotNet.Arcade.Sdk"
+
+ local toolset_version=$_ReadGlobalVersion
+ local toolset_location_file="$toolset_dir/$toolset_version.txt"
+
+ if [[ -a "$toolset_location_file" ]]; then
+ local path=`cat "$toolset_location_file"`
+ if [[ -a "$path" ]]; then
+ # return value
+ _InitializeToolset="$path"
+ return
+ fi
+ fi
+
+ if [[ "$restore" != true ]]; then
+ Write-PipelineTelemetryError -category 'InitializeToolset' "Toolset version $toolset_version has not been restored."
+ ExitWithExitCode 2
+ fi
+
+ local proj="$toolset_dir/restore.proj"
+
+ local bl=""
+ if [[ "$binary_log" == true ]]; then
+ bl="/bl:$log_dir/ToolsetRestore.binlog"
+ fi
+
+ echo '' > "$proj"
+ MSBuild-Core "$proj" $bl /t:__WriteToolsetLocation /clp:ErrorsOnly\;NoSummary /p:__ToolsetLocationOutputFile="$toolset_location_file"
+
+ local toolset_build_proj=`cat "$toolset_location_file"`
+
+ if [[ ! -a "$toolset_build_proj" ]]; then
+ Write-PipelineTelemetryError -category 'InitializeToolset' "Invalid toolset path: $toolset_build_proj"
+ ExitWithExitCode 3
+ fi
+
+ # return value
+ _InitializeToolset="$toolset_build_proj"
+}
+
+function ExitWithExitCode {
+ if [[ "$ci" == true && "$prepare_machine" == true ]]; then
+ StopProcesses
+ fi
+ exit $1
+}
+
+function StopProcesses {
+ echo "Killing running build processes..."
+ pkill -9 "dotnet" || true
+ pkill -9 "vbcscompiler" || true
+ return 0
+}
+
+function MSBuild {
+ local args=$@
+ if [[ "$pipelines_log" == true ]]; then
+ InitializeBuildTool
+ InitializeToolset
+
+ # Work around issues with Azure Artifacts credential provider
+ # https://github.com/dotnet/arcade/issues/3932
+ if [[ "$ci" == true ]]; then
+ dotnet nuget locals http-cache -c
+
+ export NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS=20
+ export NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS=20
+ Write-PipelineSetVariable -name "NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS" -value "20"
+ Write-PipelineSetVariable -name "NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS" -value "20"
+ fi
+
+ local toolset_dir="${_InitializeToolset%/*}"
+ local logger_path="$toolset_dir/$_InitializeBuildToolFramework/Microsoft.DotNet.Arcade.Sdk.dll"
+ args=( "${args[@]}" "-logger:$logger_path" )
+ fi
+
+ MSBuild-Core ${args[@]}
+}
+
+function MSBuild-Core {
+ if [[ "$ci" == true ]]; then
+ if [[ "$binary_log" != true ]]; then
+ Write-PipelineTaskError "Binary log must be enabled in CI build."
+ ExitWithExitCode 1
+ fi
+
+ if [[ "$node_reuse" == true ]]; then
+ Write-PipelineTaskError "Node reuse must be disabled in CI build."
+ ExitWithExitCode 1
+ fi
+ fi
+
+ InitializeBuildTool
+
+ local warnaserror_switch=""
+ if [[ $warn_as_error == true ]]; then
+ warnaserror_switch="/warnaserror"
+ fi
+
+ "$_InitializeBuildTool" "$_InitializeBuildToolCommand" /m /nologo /clp:Summary /v:$verbosity /nr:$node_reuse $warnaserror_switch /p:TreatWarningsAsErrors=$warn_as_error /p:ContinuousIntegrationBuild=$ci "$@" || {
+ local exit_code=$?
+ Write-PipelineTaskError "Build failed (exit code '$exit_code')."
+ ExitWithExitCode $exit_code
+ }
+}
+
+ResolvePath "${BASH_SOURCE[0]}"
+_script_dir=`dirname "$_ResolvePath"`
+
+. "$_script_dir/pipeline-logging-functions.sh"
+
+eng_root=`cd -P "$_script_dir/.." && pwd`
+repo_root=`cd -P "$_script_dir/../.." && pwd`
+artifacts_dir="$repo_root/artifacts"
+toolset_dir="$artifacts_dir/toolset"
+tools_dir="$repo_root/.tools"
+log_dir="$artifacts_dir/log/$configuration"
+temp_dir="$artifacts_dir/tmp/$configuration"
+
+global_json_file="$repo_root/global.json"
+# determine if global.json contains a "runtimes" entry
+global_json_has_runtimes=false
+dotnetlocal_key=`grep -m 1 "runtimes" "$global_json_file"` || true
+if [[ -n "$dotnetlocal_key" ]]; then
+ global_json_has_runtimes=true
+fi
+
+# HOME may not be defined in some scenarios, but it is required by NuGet
+if [[ -z $HOME ]]; then
+ export HOME="$repo_root/artifacts/.home/"
+ mkdir -p "$HOME"
+fi
+
+mkdir -p "$toolset_dir"
+mkdir -p "$temp_dir"
+mkdir -p "$log_dir"
+
+Write-PipelineSetVariable -name "Artifacts" -value "$artifacts_dir"
+Write-PipelineSetVariable -name "Artifacts.Toolset" -value "$toolset_dir"
+Write-PipelineSetVariable -name "Artifacts.Log" -value "$log_dir"
+Write-PipelineSetVariable -name "Temp" -value "$temp_dir"
+Write-PipelineSetVariable -name "TMP" -value "$temp_dir"
diff --git a/eng/configure-toolset.ps1 b/eng/configure-toolset.ps1
new file mode 100644
index 000000000..1c89f1a18
--- /dev/null
+++ b/eng/configure-toolset.ps1
@@ -0,0 +1,9 @@
+# Disable using the globally installed SDK. Using the global install can cause
+# roll-forward to a newer SDK that may not work.
+$script:useInstalledDotNetCli = $false
+
+# Always use the local repo packages directory instead of the user's NuGet cache
+# to keep behavior the same between "ci" and non-"ci" builds. If the efficiency gain is
+# required and it's worth maintaining the different types of build, this can be
+# removed.
+$script:useGlobalNuGetCache = $false
diff --git a/eng/jobs/prepare-signed-artifacts.yml b/eng/jobs/prepare-signed-artifacts.yml
new file mode 100644
index 000000000..ed807af3a
--- /dev/null
+++ b/eng/jobs/prepare-signed-artifacts.yml
@@ -0,0 +1,63 @@
+parameters:
+ dependsOn: []
+ PublishRidAgnosticPackagesFromJobName: ''
+
+jobs:
+- job: PrepareSignedArtifacts
+ displayName: Prepare Signed Artifacts
+ dependsOn: ${{ parameters.dependsOn }}
+ pool:
+ name: NetCoreInternal-Pool
+ queue: buildpool.windows.10.amd64.vs2017
+ # Double the default timeout.
+ timeoutInMinutes: 120
+ workspace:
+ clean: all
+
+ steps:
+
+ - ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - task: NuGetAuthenticate@0
+
+ - task: MicroBuildSigningPlugin@2
+ displayName: Install MicroBuild plugin for Signing
+ inputs:
+ signType: $(SignType)
+ zipSources: false
+ feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json
+ continueOnError: false
+ condition: and(succeeded(), in(variables['SignType'], 'real', 'test'))
+
+ - task: DownloadBuildArtifacts@0
+ displayName: Download IntermediateUnsignedArtifacts
+ inputs:
+ artifactName: IntermediateUnsignedArtifacts
+ downloadPath: $(Build.SourcesDirectory)\artifacts\PackageDownload
+
+ - script: >-
+ build.cmd -ci
+ -projects $(Build.SourcesDirectory)\publish\prepare-artifacts.proj
+ /p:Configuration=Release
+ /p:PublishRidAgnosticPackagesFromJobName=${{ parameters.PublishRidAgnosticPackagesFromJobName }}
+ /p:SignType=$(SignType)
+ /p:DotNetSignType=$(SignType)
+ /bl:$(Build.SourcesDirectory)\prepare-artifacts.binlog
+ displayName: Prepare artifacts and upload to build
+
+ - task: CopyFiles@2
+ displayName: Copy Files to $(Build.StagingDirectory)\BuildLogs
+ inputs:
+ SourceFolder: '$(Build.SourcesDirectory)'
+ Contents: |
+ **/*.log
+ **/*.binlog
+ TargetFolder: '$(Build.StagingDirectory)\BuildLogs'
+ continueOnError: true
+ condition: succeededOrFailed()
+
+ - task: PublishBuildArtifacts@1
+ displayName: Publish Artifact BuildLogs
+ inputs:
+ PathtoPublish: '$(Build.StagingDirectory)\BuildLogs'
+ ArtifactName: Logs-PrepareSignedArtifacts
+ condition: succeededOrFailed()
diff --git a/eng/jobs/run-publish-project.yml b/eng/jobs/run-publish-project.yml
new file mode 100644
index 000000000..525d31f48
--- /dev/null
+++ b/eng/jobs/run-publish-project.yml
@@ -0,0 +1,156 @@
+parameters:
+ projectName: ''
+ dependency: null
+
+jobs:
+
+- template: /eng/common/templates/post-build/setup-maestro-vars.yml
+
+- job: CustomPublish
+ displayName: Custom Publish
+ dependsOn: setupMaestroVars
+ # Only run this job if setup-maestro-vars says the current branch publishes to this channel.
+ # Logic copied from ../common/templates/post-build/channels/netcore-dev-5.yml
+ condition: contains(
+ dependencies.setupMaestroVars.outputs['setReleaseVars.InitialChannels'],
+ format('[{0}]', variables['${{ parameters.dependency.channel.bar }}']))
+ pool:
+ name: Hosted VS2017
+ # Double the default timeout.
+ timeoutInMinutes: 120
+ workspace:
+ clean: all
+
+ variables:
+ # Only get the secret variable groups if the def has the official name. Reduce dev build def risk.
+ - ${{ if eq(variables['Build.DefinitionName'], 'dotnet-windowsdesktop') }}:
+ # Used for publishing individual leg assets to azure blob storage
+ - ${{ if eq(parameters.dependency.channel.public, 'true') }}:
+ - group: DotNet-DotNetCli-Storage
+ - ${{ if ne(parameters.dependency.channel.public, 'true') }}:
+ - group: DotNet-MSRC-Storage
+ # Used for dotnet/versions update
+ - group: DotNet-Versions-Publish
+
+ - name: _DefaultContainerName
+ value: dotnet
+ - name: _DefaultChecksumsContainerName
+ value: dotnet
+
+ - ${{ if eq(parameters.dependency.channel.public, 'true') }}:
+ - name: _DefaultAzureAccountName
+ value: dotnetcli
+ - name: _DefaultAzureAccessToken
+ value: $(dotnetcli-storage-key)
+ - name: _DefaultChecksumAzureAccountName
+ value: dotnetclichecksums
+ - name: _DefaultChecksumAzureAccessToken
+ value: $(dotnetclichecksums-storage-key)
+ # dotnet/versions update
+ - name: _GitHubUser
+ value: $[ coalesce(variables.GitHubUser, 'dotnet-build-bot') ]
+ - name: _GitHubEmail
+ value: $[ coalesce(variables.GitHubEmail, 'dotnet-build-bot@microsoft.com') ]
+ - name: _GitHubAuthToken
+ value: $[ coalesce(variables.GitHubAuthToken, '$(AccessToken-dotnet-build-bot-public-repo)') ]
+ - name: _VersionsRepoOwner
+ value: $[ coalesce(variables.VersionsRepoOwner, 'dotnet') ]
+ - name: _VersionsRepo
+ value: $[ coalesce(variables.VersionsRepo, 'versions') ]
+ - name: _DotNetVersionsArgs
+ value: >-
+ /p:GitHubUser=$(_GitHubUser)
+ /p:GitHubEmail=$(_GitHubEmail)
+ /p:GitHubAuthToken=$(_GitHubAuthToken)
+ /p:VersionsRepoOwner=$(_VersionsRepoOwner)
+ /p:VersionsRepo=$(_VersionsRepo)
+ /p:VersionsRepoPath=build-info/dotnet/windowsdesktop/$(FullBranchName)
+
+ - ${{ if ne(parameters.dependency.channel.public, 'true') }}:
+ - name: _DefaultAzureAccountName
+ value: dotnetclimsrc
+ - name: _DefaultAzureAccessToken
+ value: $(dotnetclimsrc-access-key)
+ - name: _DefaultChecksumAzureAccountName
+ value: dotnetclimsrc
+ - name: _DefaultChecksumsContainerName
+ value: dotnet-checksums
+ - name: _DefaultChecksumAzureAccessToken
+ value: $(dotnetclimsrc-access-key)
+ # dotnet/versions update (disabled)
+ - name: _DotNetVersionsArgs
+ value: ''
+
+ # Blob storage publish (installers and checksums)
+ - name: _AzureAccountName
+ value: $[ coalesce(variables.AzureAccountName, '$(_DefaultAzureAccountName)') ]
+ - name: _ContainerName
+ value: $[ coalesce(variables.ContainerName, '$(_DefaultContainerName)') ]
+ - name: _AzureAccessToken
+ value: $[ coalesce(variables.AzureAccessToken, '$(_DefaultAzureAccessToken)') ]
+ - name: _ChecksumAzureAccountName
+ value: $[ coalesce(variables.ChecksumAzureAccountName, '$(_DefaultChecksumAzureAccountName)') ]
+ - name: _ChecksumContainerName
+ value: $[ coalesce(variables.ChecksumContainerName, '$(_DefaultChecksumsContainerName)') ]
+ - name: _ChecksumAzureAccessToken
+ value: $[ coalesce(variables.ChecksumAzureAccessToken, '$(_DefaultChecksumAzureAccessToken)') ]
+
+ - name: _CommonPublishArgs
+ value: >-
+ /p:AzureAccountName=$(_AzureAccountName)
+ /p:ContainerName=$(_ContainerName)
+ /p:AzureAccessToken=$(_AzureAccessToken)
+ /p:ChecksumAzureAccountName=$(_ChecksumAzureAccountName)
+ /p:ChecksumContainerName=$(_ChecksumContainerName)
+ /p:ChecksumAzureAccessToken=$(_ChecksumAzureAccessToken)
+
+ steps:
+
+ - ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - task: NuGetAuthenticate@0
+
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Artifacts
+ inputs:
+ artifactName: PreparedArtifacts
+ downloadPath: $(Build.SourcesDirectory)\artifacts\PackageDownload
+
+ - powershell: |
+ $prefix = "refs/heads/"
+ $branch = "$(Build.SourceBranch)"
+ $branchName = $branch
+ if ($branchName.StartsWith($prefix))
+ {
+ $branchName = $branchName.Substring($prefix.Length)
+ }
+ Write-Host "For Build.SourceBranch $branch, FullBranchName is $branchName"
+ Write-Host "##vso[task.setvariable variable=FullBranchName;]$branchName"
+ displayName: Find true SourceBranchName
+
+ - script: >-
+ build.cmd -ci
+ -projects $(Build.SourcesDirectory)\publish\${{ parameters.projectName }}.proj
+ /p:Channel=${{ parameters.dependency.channel.storage }}
+ /p:Configuration=Release
+ $(_CommonPublishArgs)
+ $(_DotNetVersionsArgs)
+ /bl:$(Build.SourcesDirectory)\${{ parameters.projectName }}.binlog
+ displayName: Publish to custom locations
+
+ - task: CopyFiles@2
+ displayName: Copy Files to $(Build.StagingDirectory)\BuildLogs
+ inputs:
+ SourceFolder: '$(Build.SourcesDirectory)'
+ Contents: |
+ **/*.log
+ **/*.binlog
+ TargetFolder: '$(Build.StagingDirectory)\BuildLogs'
+ continueOnError: true
+ condition: succeededOrFailed()
+
+ - task: PublishBuildArtifacts@1
+ displayName: Publish Artifact BuildLogs
+ inputs:
+ PathtoPublish: '$(Build.StagingDirectory)\BuildLogs'
+ ArtifactName: Logs-CustomPublish-${{ parameters.dependency.dependsOn }}-${{ parameters.projectName }}
+ condition: succeededOrFailed()
diff --git a/eng/jobs/steps/upload-job-artifacts.yml b/eng/jobs/steps/upload-job-artifacts.yml
new file mode 100644
index 000000000..8b9e42f6a
--- /dev/null
+++ b/eng/jobs/steps/upload-job-artifacts.yml
@@ -0,0 +1,56 @@
+parameters:
+ name: ''
+
+steps:
+# Upload build outputs as build artifacts only if internal and not PR, to save storage space.
+- ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - task: CopyFiles@2
+ displayName: Prepare job-specific Artifacts subdirectory
+ inputs:
+ SourceFolder: '$(Build.SourcesDirectory)/artifacts/packages/$(_BuildConfig)'
+ Contents: |
+ Shipping/**/*
+ NonShipping/**/*
+ TargetFolder: '$(Build.StagingDirectory)/Artifacts/${{ parameters.name }}'
+ CleanTargetFolder: true
+ condition: and(succeeded(), eq(variables._BuildConfig, 'Release'))
+
+ - task: PublishBuildArtifacts@1
+ displayName: Publish Artifacts
+ inputs:
+ pathToPublish: '$(Build.StagingDirectory)/Artifacts'
+ artifactName: IntermediateUnsignedArtifacts
+ artifactType: container
+ condition: and(succeeded(), eq(variables._BuildConfig, 'Release'))
+
+# Always upload test outputs and build logs.
+- task: PublishTestResults@2
+ displayName: Publish Test Results
+ inputs:
+ testResultsFormat: 'xUnit'
+ testResultsFiles: '*.xml'
+ searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)'
+ mergeTestResults: true
+ testRunTitle: ${{ parameters.name }}-$(_BuildConfig)
+ continueOnError: true
+ condition: always()
+
+- task: CopyFiles@2
+ displayName: Prepare BuildLogs staging directory
+ inputs:
+ SourceFolder: '$(Build.SourcesDirectory)'
+ Contents: |
+ **/*.log
+ **/*.binlog
+ TargetFolder: '$(Build.StagingDirectory)/BuildLogs'
+ CleanTargetFolder: true
+ continueOnError: true
+ condition: succeededOrFailed()
+
+- task: PublishBuildArtifacts@1
+ displayName: Publish BuildLogs
+ inputs:
+ PathtoPublish: '$(Build.StagingDirectory)/BuildLogs'
+ ArtifactName: Logs-${{ parameters.name }}-$(_BuildConfig)
+ continueOnError: true
+ condition: succeededOrFailed()
diff --git a/eng/jobs/windows-build.yml b/eng/jobs/windows-build.yml
new file mode 100644
index 000000000..c99e188ec
--- /dev/null
+++ b/eng/jobs/windows-build.yml
@@ -0,0 +1,81 @@
+parameters:
+ additionalMSBuildArguments: ''
+ displayName: ''
+ publishRidAgnosticPackages: false
+ skipTests: $(SkipTests)
+ targetArchitecture: null
+ timeoutInMinutes: 120
+
+jobs:
+ - job: ${{ parameters.name }}
+ displayName: ${{ parameters.name }}
+ timeoutInMinutes: ${{ parameters.timeoutInMinutes }}
+ pool:
+ # Use a hosted pool when possible.
+ ${{ if eq(variables['System.TeamProject'], 'public') }}:
+ name: NetCorePublic-Pool
+ queue: buildpool.windows.10.amd64.vs2017.open
+ ${{ if ne(variables['System.TeamProject'], 'public') }}:
+ name: NetCoreInternal-Pool
+ queue: buildpool.windows.10.amd64.vs2017
+ strategy:
+ matrix:
+ debug:
+ _BuildConfig: Debug
+ release:
+ _BuildConfig: Release
+ workspace:
+ clean: all
+ variables:
+ CommonMSBuildArgs: >-
+ /p:Configuration=$(_BuildConfig)
+ /p:OfficialBuildId=$(OfficialBuildId)
+ /p:TargetArchitecture=${{ parameters.targetArchitecture }}
+ /p:PortableBuild=true
+ /p:SkipTests=${{ parameters.skipTests }}
+ MsbuildSigningArguments: >-
+ /p:CertificateId=400
+ /p:DotNetSignType=$(SignType)
+ TargetArchitecture: ${{ parameters.targetArchitecture }}
+
+ steps:
+
+ - ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - task: NuGetAuthenticate@0
+
+ - task: MicroBuildSigningPlugin@2
+ displayName: Install MicroBuild plugin for Signing
+ inputs:
+ signType: $(SignType)
+ zipSources: false
+ feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json
+ continueOnError: false
+ condition: and(succeeded(), in(variables['SignType'], 'real', 'test'))
+
+ # NuGet's http cache lasts 30 minutes. If we're on a static machine, this may interfere with
+ # auto-update PRs by preventing the CI build from fetching the new version. Delete the cache.
+ - powershell: Remove-Item -Recurse -ErrorAction Ignore "$env:LocalAppData\NuGet\v3-cache"
+ displayName: Clear NuGet http cache (if exists)
+
+ - script: >-
+ build.cmd -ci -test
+ $(CommonMSBuildArgs)
+ $(MsbuildSigningArguments)
+ displayName: Build
+
+ - ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - task: NuGetCommand@2
+ displayName: Push Visual Studio NuPkgs
+ inputs:
+ command: push
+ packagesToPush: '$(Build.SourcesDirectory)/artifacts/packages/$(_BuildConfig)/*/VS.Redist.Common.*.nupkg'
+ nuGetFeedType: external
+ publishFeedCredentials: 'DevDiv - VS package feed'
+ condition: and(
+ succeeded(),
+ eq(variables['_BuildConfig'], 'Release'),
+ ne(variables['DisableVSPublish'], 'true'))
+
+ - template: steps/upload-job-artifacts.yml
+ parameters:
+ name: ${{ parameters.name }}
diff --git a/eng/stages/publish.yml b/eng/stages/publish.yml
new file mode 100644
index 000000000..bff4a31ad
--- /dev/null
+++ b/eng/stages/publish.yml
@@ -0,0 +1,46 @@
+parameters:
+ dependsOnPublishStages: []
+
+stages:
+
+# Create extra stage per BAR channel that needs extra publish steps.
+- ${{ each dependency in parameters.dependsOnPublishStages }}:
+ - stage: PublishBlob_${{ dependency.dependsOn }}
+ displayName: '${{ dependency.channel.name }} Blob Publish'
+ dependsOn: PrepareForPublish
+ variables:
+ - template: /eng/common/templates/post-build/common-variables.yml
+ jobs:
+ - template: /eng/jobs/run-publish-project.yml
+ parameters:
+ projectName: publish-blobs
+ dependency: ${{ dependency }}
+
+# Stages-based publishing entry point
+- template: /eng/common/templates/post-build/post-build.yml
+ parameters:
+ validateDependsOn:
+ - ${{ each dependency in parameters.dependsOnPublishStages }}:
+ - PublishBlob_${{ dependency.dependsOn }}
+ # Symbol validation is not ready yet. https://github.com/dotnet/arcade/issues/2871
+ enableSymbolValidation: false
+ # SourceLink validation doesn't work in dev builds: tries to pull from GitHub. https://github.com/dotnet/arcade/issues/3604
+ enableSourceLinkValidation: false
+ # Allow symbol publish to emit expected warnings without failing the build. Include single
+ # quotes inside the string so that it passes through to MSBuild without script interference.
+ symbolPublishingAdditionalParameters: "'-warnAsError:$false'"
+
+# Create extra stage per BAR channel that needs extra publish steps. These run after the Arcade
+# stages because they depend on Arcade's NuGet package publish being complete.
+- ${{ each dependency in parameters.dependsOnPublishStages }}:
+ - stage: PublishFinal_${{ dependency.dependsOn }}
+ displayName: '${{ dependency.channel.name }} Finalize'
+ dependsOn:
+ - ${{ dependency.dependsOn }}
+ variables:
+ - template: /eng/common/templates/post-build/common-variables.yml
+ jobs:
+ - template: /eng/jobs/run-publish-project.yml
+ parameters:
+ projectName: publish-final
+ dependency: ${{ dependency }}
diff --git a/global.json b/global.json
new file mode 100644
index 000000000..030e0c14f
--- /dev/null
+++ b/global.json
@@ -0,0 +1,9 @@
+{
+ "tools": {
+ "dotnet": "3.0.100-preview9-014004"
+ },
+ "msbuild-sdks": {
+ "Microsoft.DotNet.Arcade.Sdk": "5.0.0-beta.19469.8",
+ "Microsoft.DotNet.Build.Tasks.SharedFramework.Sdk": "5.0.0-beta.19502.7"
+ }
+}
diff --git a/pkg/Directory.Build.props b/pkg/Directory.Build.props
new file mode 100644
index 000000000..3efa4ad06
--- /dev/null
+++ b/pkg/Directory.Build.props
@@ -0,0 +1,217 @@
+
+
+
+
+
+
+ $(ProjectDir)LICENSE.TXT
+ $(ProjectDir)THIRD-PARTY-NOTICES.TXT
+ https://github.com/dotnet/core-setup/blob/master/LICENSE.TXT
+ $(MSBuildThisFileDirectory)descriptions.json
+
+ https://go.microsoft.com/fwlink/?LinkID=799417
+ https://dot.net
+
+
+ true
+
+
+
+
+
+
+
+ AnyCPU
+
+ true
+
+ $(PackageOutputPath)
+
+ $(RuntimeIdentifier)
+
+ true
+ true
+
+ true
+
+ true
+
+
+ true
+ false
+
+
+ $(BaseIntermediateOutputPath)
+ $(RestoreOutputPath)\project.assets.json
+
+
+ true
+
+
+ false
+
+ true
+
+
+
+ NetCore
+ VS.Redist.Common
+
+
+
+
+
+
+
+
+
+
+
+ $(MSBuildProjectName)
+
+
+
+ true
+ true
+ true
+
+
+
+
+ unused
+ $(NETCoreAppFrameworkMoniker)
+ $(NETCoreAppFramework)
+ $(NETCoreAppFramework)
+ $(CrossGenRootPath)$(MSBuildProjectName)\
+ true
+ true
+ $(PackageSymbolsBinDir)/$(MSBuildProjectName)
+
+
+
+ $(PackageTargetFramework)
+ $(NETCoreAppFramework)
+
+
+
+
+
+ true
+
+
+ true
+
+
+
+
+ $(MSBuildThisFileDirectory)\windowsdesktopRIDs.props
+
+
+
+
+
+ <_buildingOnRID Include="$(PackageRID)" Condition="'$(BuildOnUnknownPlatforms)' != 'false'">
+ $(Platform)
+
+
+
+
+
+ true
+
+
+
+
+
+
+
+
+ $(MSBuildProjectFullPath)
+
+
+
+ <_project Include="@(BuildRID)">
+ x64
+ %(Identity)
+ %(BuildRID)
+ PackageTargetRuntime=%(BuildRID);RuntimeIdentifier=%(BuildRID);Platform=%(Platform)
+
+
+
+
+
+
+
+
+
+
+ @(RestoreBuildRID)
+
+ $(PackageRID)
+
+
+
+
+
+ .exe
+
+ .dll
+ .pdb
+
+
+
+
+
+ lib
+ .dylib
+ .dwarf
+
+
+
+
+
+ lib
+ .so
+ .dbg
+
+
+
+
+
+
+
+
+
+
diff --git a/pkg/Directory.Build.targets b/pkg/Directory.Build.targets
new file mode 100644
index 000000000..31f32208c
--- /dev/null
+++ b/pkg/Directory.Build.targets
@@ -0,0 +1,336 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ $(Version)
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ true
+
+
+
+
+ runtimes/$(PackageTargetRuntime)/native
+ true
+
+
+
+
+
+
+
+ <_PackageIdentity Include="$(Id)">
+ $(PackageVersion)
+ $(PackageTargetRuntime)
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ <_PkgProjDependencyWithoutTFM Include="@(PkgProjDependency)" Condition="'%(PkgProjDependency.TargetFramework)' == '' AND '%(PkgProjDependency.TargetRuntime)' == '' AND '%(PkgProjDependency.DoNotExpand)' != 'true'" />
+ <_AllPkgProjTFMs Include="%(PkgProjDependency.TargetFramework)" Condition="'%(PkgProjDependency.DependencyKind)' == 'Direct'" />
+
+
+
+
+ %(_AllPkgProjTFMs.Identity)
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ $(BuildTargetPath)$(Id).props
+ $(BuildTargetPath)$(Id.Replace('.Internal', '')).PlatformManifest.txt
+
+
+
+
+
+
+
+
+
+
+
+
+
+ <_propsFileTemplateFile Condition="'%(_depprojDataFile.PropsFile)' == 'true'">%(_depprojDataFile.Identity)
+
+
+
+ <_runtimeDependencyItems>@(RuntimeDependency)
+ <_filledPropsFileContent>$([System.IO.File]::ReadAllText('$(_propsFileTemplateFile)')
+ .Replace("__PackageId__", "$(Id.Replace('.', '_'))")
+ .Replace("__PreferredPackages__", "$(Id);$(_runtimeDependencyItems)"))
+
+
+ <_filledPropsFile>$(IntermediateOutputPath)$(MSBuildProjectName).props
+
+
+
+
+
+
+
+
+
+
+
+
+ <_platformManifestFileTemplateFile Condition="'%(_depprojDataFile.PlatformManifestFile)' == 'true'">%(_depprojDataFile.Identity)
+
+
+
+ <_runtimeDependencyItems>@(RuntimeDependency)
+ <_filledPlatformManifestFileContent>$([System.IO.File]::ReadAllText('$(_platformManifestFileTemplateFile)')
+ .Replace("__PackageId__", "$(Id)"))
+
+
+ <_filledPlatformManifestFile>$(IntermediateOutputPath)PlatformManifest.txt
+
+
+
+
+
+
+
+
+
+
+
+ FrameworkList.xml
+ $(IntermediateOutputPath)$(FrameworkListFilename)
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ $(FrameworkListTargetPath)
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ $(PreReleaseVersionLabel.Substring(0,1).ToUpperInvariant())
+ $(ReleaseBrandSuffix)$(PreReleaseVersionLabel.Substring(1,6))
+ $(ReleaseBrandSuffix) $(PreReleaseVersionLabel.Substring(7))
+
+
+
+ Microsoft .NET Core
+ $(ProductionVersion)
+ $(ProductionVersion) $(ReleaseBrandSuffix)
+
+ $(ProductBrandPrefix) Targeting Pack - $(ProductBrandSuffix)
+ $(ProductBrandPrefix) Runtime - $(ProductBrandSuffix)
+
+
+
+
+
+ 5
+
+ 1996-04-01
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/pkg/badge/badge.proj b/pkg/badge/badge.proj
new file mode 100644
index 000000000..ebfa5f0e7
--- /dev/null
+++ b/pkg/badge/badge.proj
@@ -0,0 +1,25 @@
+
+
+
+
+
+ $(MSBuildThisFileDirectory)version_badge.svg
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/pkg/badge/version_badge.svg b/pkg/badge/version_badge.svg
new file mode 100644
index 000000000..956523b78
--- /dev/null
+++ b/pkg/badge/version_badge.svg
@@ -0,0 +1,16 @@
+
diff --git a/pkg/descriptions.json b/pkg/descriptions.json
new file mode 100644
index 000000000..f2119446f
--- /dev/null
+++ b/pkg/descriptions.json
@@ -0,0 +1,22 @@
+[
+ {
+ "Name": "RuntimePackage",
+ "Description": "Internal implementation package not meant for direct consumption. Please do not reference directly.",
+ "CommonTypes": [ ]
+ },
+ {
+ "Name": "NuGet3MinVersion",
+ "Description": "When using NuGet 3.x this package requires at least version {0}.",
+ "CommonTypes": [ ]
+ },
+ {
+ "Name": "Microsoft.WindowsDesktop.App",
+ "Description": "Shared Framework for Windows Forms and WPF.",
+ "CommonTypes": [ ]
+ },
+ {
+ "Name": "Microsoft.WindowsDesktop.App.Ref",
+ "Description": "Windows Forms and WPF targeting pack. Contains reference assemblies, documentation, and other design-time assets.",
+ "CommonTypes": [ ]
+ }
+]
diff --git a/pkg/packageIndex.json b/pkg/packageIndex.json
new file mode 100644
index 000000000..7a73a41bf
--- /dev/null
+++ b/pkg/packageIndex.json
@@ -0,0 +1,2 @@
+{
+}
\ No newline at end of file
diff --git a/pkg/packaging.stubs.targets b/pkg/packaging.stubs.targets
new file mode 100644
index 000000000..120f000cb
--- /dev/null
+++ b/pkg/packaging.stubs.targets
@@ -0,0 +1,11 @@
+
+
+
+
+
+
+
+
diff --git a/pkg/signed-bundle/signed-bundle.proj b/pkg/signed-bundle/signed-bundle.proj
new file mode 100644
index 000000000..59d4b5e27
--- /dev/null
+++ b/pkg/signed-bundle/signed-bundle.proj
@@ -0,0 +1,65 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/pkg/vs-insertion-packages/vs-insertion-packages.proj b/pkg/vs-insertion-packages/vs-insertion-packages.proj
new file mode 100644
index 000000000..86af084d8
--- /dev/null
+++ b/pkg/vs-insertion-packages/vs-insertion-packages.proj
@@ -0,0 +1,21 @@
+
+
+
+
+ $(NETCoreAppFramework)
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/pkg/windowsdesktop/Directory.Build.props b/pkg/windowsdesktop/Directory.Build.props
new file mode 100644
index 000000000..7ed652d99
--- /dev/null
+++ b/pkg/windowsdesktop/Directory.Build.props
@@ -0,0 +1,33 @@
+
+
+ Microsoft Windows Desktop
+
+
+
+
+
+ windowsdesktop
+
+ Microsoft.WindowsDesktop.App
+
+ WindowsDesktop
+
+
+ true
+
+
+ true
+
+
+
diff --git a/pkg/windowsdesktop/pkg/Directory.Build.props b/pkg/windowsdesktop/pkg/Directory.Build.props
new file mode 100644
index 000000000..80114f62f
--- /dev/null
+++ b/pkg/windowsdesktop/pkg/Directory.Build.props
@@ -0,0 +1,73 @@
+
+
+ true
+
+
+
+
+
+ Windows Desktop $(NETCoreAppFrameworkVersion)
+ $(NETCoreAppFrameworkIdentifier)
+ $(NETCoreAppFrameworkVersion)
+ $(FrameworkPackageName)
+
+
+
+ false
+ false
+ false
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ RuntimeIdentifier=$(PackageTargetRuntime)
+
+
+
diff --git a/pkg/windowsdesktop/pkg/Microsoft.WindowsDesktop.App.Ref.pkgproj b/pkg/windowsdesktop/pkg/Microsoft.WindowsDesktop.App.Ref.pkgproj
new file mode 100644
index 000000000..35e3d8428
--- /dev/null
+++ b/pkg/windowsdesktop/pkg/Microsoft.WindowsDesktop.App.Ref.pkgproj
@@ -0,0 +1,2 @@
+
+
diff --git a/pkg/windowsdesktop/pkg/Microsoft.WindowsDesktop.App.Runtime.pkgproj b/pkg/windowsdesktop/pkg/Microsoft.WindowsDesktop.App.Runtime.pkgproj
new file mode 100644
index 000000000..35e3d8428
--- /dev/null
+++ b/pkg/windowsdesktop/pkg/Microsoft.WindowsDesktop.App.Runtime.pkgproj
@@ -0,0 +1,2 @@
+
+
diff --git a/pkg/windowsdesktop/pkg/Microsoft.WindowsDesktop.App.pkgproj b/pkg/windowsdesktop/pkg/Microsoft.WindowsDesktop.App.pkgproj
new file mode 100644
index 000000000..c437f47f2
--- /dev/null
+++ b/pkg/windowsdesktop/pkg/Microsoft.WindowsDesktop.App.pkgproj
@@ -0,0 +1,56 @@
+
+
+
+
+ true
+ build/$(NETCoreAppFramework)/
+
+ data/
+
+ netcoreapp
+ 5.0
+ $(TargetFrameworkName)$(TargetFrameworkVersion)
+
+
+
+
+ true
+
+ false
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/pkg/windowsdesktop/sfx/Microsoft.WindowsDesktop.App.Bundle.bundleproj b/pkg/windowsdesktop/sfx/Microsoft.WindowsDesktop.App.Bundle.bundleproj
new file mode 100644
index 000000000..de735ae80
--- /dev/null
+++ b/pkg/windowsdesktop/sfx/Microsoft.WindowsDesktop.App.Bundle.bundleproj
@@ -0,0 +1,19 @@
+
+
+
+
+ Windows Desktop Shared Framework Bundle Installer
+
+
+
+
+
+
+
+
+
+
+
diff --git a/pkg/windowsdesktop/sfx/Microsoft.WindowsDesktop.App.SharedFx.sfxproj b/pkg/windowsdesktop/sfx/Microsoft.WindowsDesktop.App.SharedFx.sfxproj
new file mode 100644
index 000000000..edd3cdb44
--- /dev/null
+++ b/pkg/windowsdesktop/sfx/Microsoft.WindowsDesktop.App.SharedFx.sfxproj
@@ -0,0 +1,13 @@
+
+
+
+ Microsoft.WindowsDesktop.App
+
+ true
+
+
+
+
+
+
+
diff --git a/pkg/windowsdesktop/sfx/bundle.thm b/pkg/windowsdesktop/sfx/bundle.thm
new file mode 100644
index 000000000..018459e02
--- /dev/null
+++ b/pkg/windowsdesktop/sfx/bundle.thm
@@ -0,0 +1,115 @@
+
+
+ #(loc.Caption)
+ Segoe UI
+ Segoe UI
+ Segoe UI
+ Segoe UI
+ Segoe UI
+ Segoe UI
+
+ #(loc.Title)
+
+
+
+
+ #(loc.HelpHeader)
+ #(loc.HelpText)
+
+
+
+
+
+
+
+
+ #(loc.WelcomeHeaderMessage)
+ #(loc.WelcomeDescription)
+ #(loc.LearnMoreTitle)
+ #(loc.DocumentationLink)
+ #(loc.PrivacyStatementLink)
+ #(loc.EulaLink)
+
+
+
+
+
+
+
+ #(loc.OptionsHeader)
+ #(loc.OptionsLocationLabel)
+
+
+
+
+
+
+
+
+ #(loc.FilesInUseHeader)
+ #(loc.FilesInUseLabel)
+
+
+
+
+
+
+
+
+
+
+
+
+
+ #(loc.ProgressHeader)
+ #(loc.ProgressLabel)
+ #(loc.OverallProgressPackageText)
+
+
+
+
+
+
+
+ #(loc.ModifyHeader)
+
+
+
+
+
+
+
+
+ #(loc.SuccessHeader)
+ #(loc.SuccessInstallHeader)
+ #(loc.SuccessRepairHeader)
+ #(loc.SuccessUninstallHeader)
+
+ #(loc.SuccessRestartText)
+
+ #(loc.SuccessInstallLocation)
+ #(loc.SuccessInstallProductName)
+ #(loc.ResourcesHeader)
+ #(loc.DocumentationLink)
+ #(loc.RelaseNotesLink)
+ #(loc.TutorialLink)
+ #(loc.TelemetryLink)
+
+
+
+
+
+
+
+
+ #(loc.FailureHeader)
+ #(loc.FailureInstallHeader)
+ #(loc.FailureUninstallHeader)
+ #(loc.FailureRepairHeader)
+ #(loc.FailureHyperlinkLogText)
+
+ #(loc.FailureRestartText)
+
+
+
+
diff --git a/pkg/windowsdesktop/sfx/bundle.wxl b/pkg/windowsdesktop/sfx/bundle.wxl
new file mode 100644
index 000000000..a8ef4b61b
--- /dev/null
+++ b/pkg/windowsdesktop/sfx/bundle.wxl
@@ -0,0 +1,74 @@
+
+
+ [WixBundleName] Installer
+ [BUNDLEMONIKER]
+ You just need a shell, a text editor and 10 minutes of your time.
+
+Ready? Set? Let's go!
+ Are you sure you want to cancel?
+ Previous version
+
+ /install | /repair | /uninstall | /layout [directory] - installs, repairs, uninstalls or
+ creates a complete local copy of the bundle in directory. Install is the default.
+
+/passive | /quiet - displays minimal UI with no prompts or displays no UI and
+ no prompts. By default UI and all prompts are displayed.
+
+/norestart - suppress any attempts to restart. By default UI will prompt before restart.
+/log log.txt - logs to a specific file. By default a log file is created in %TEMP%.
+ &Close
+ I &agree to the license terms and conditions
+ &Options
+ &Install
+ &Close
+
+ Install location:
+ &Browse
+ &OK
+ &Cancel
+
+ Processing:
+ Initializing...
+ &Cancel
+
+ &Repair
+ &Uninstall
+ &Close
+
+
+
+
+ &Launch
+ You must restart your computer before you can use the software.
+ &Restart
+ &Close
+
+
+
+
+ One or more issues caused the setup to fail. Please fix the issues and then retry setup. For more information see the <a href="#">log file</a>.
+ You must restart your computer to complete the rollback of the software.
+ &Restart
+ &Close
+ The [PRODUCT_NAME] is not supported on this operating system. For more information, see [LINK_PREREQ_PAGE].
+ The [PRODUCT_NAME] isn't supported on x86 operating systems. Please install using the corresponding x86 installer.
+
+ The following applications are using files that need to be updated:
+ Close the &applications and attempt to restart them.
+ &Do not close applications. A reboot will be required.
+ &OK
+ &Cancel
+
+
+ Learn more about .NET Core
+ The following was installed at [DOTNETHOME]
+ - [BUNDLEMONIKER]
+
+ <A HREF="https://aka.ms/dotnet-docs">Documentation</A>
+ <A HREF="https://aka.ms/20-p2-rel-notes">Release Notes</A>
+ <A HREF="https://aka.ms/dotnet-tutorials">Tutorials</A>
+ <A HREF="https://aka.ms/dotnet-cli-telemetry">.NET Core Telemetry</A>
+ <A HREF="https://aka.ms/dev-privacy">Privacy Statement</A>
+ <A HREF="https://go.microsoft.com/fwlink/?LinkId=329770">.NET Library EULA</A>
+
+
diff --git a/pkg/windowsdesktop/sfx/theme/1028/bundle.wxl b/pkg/windowsdesktop/sfx/theme/1028/bundle.wxl
new file mode 100644
index 000000000..58ffa7eb4
--- /dev/null
+++ b/pkg/windowsdesktop/sfx/theme/1028/bundle.wxl
@@ -0,0 +1,74 @@
+
+
+ [WixBundleName] 安裝程式
+ [BUNDLEMONIKER]
+ 您只需要殼層、文字編輯器和 10 分鐘的時間。
+
+準備好了嗎? 讓我們開始吧!
+ 您確定要取消嗎?
+ 前一版
+
+ /install | /repair | /uninstall | /layout [directory] - 在目錄中安裝、修復、解除安裝或
+ 建立搭售方案的完整本機複本。預設為安裝。
+
+/passive | /quiet - 顯示最少 UI 且不含提示,或者不顯示 UI,也
+ 不顯示提示。預設會顯示 UI 和所有提示。
+
+/norestart - 隱藏任何重新啟動嘗試。根據預設,UI 會在重新啟動之前提示。
+/log log.txt - 記錄至特定檔案。預設會在 %TEMP% 建立記錄檔。
+ 關閉(&C)
+ 我同意授權條款及條件(&A)
+ 選項(&O)
+ 安裝(&I)
+ 關閉(&C)
+
+ 安裝位置:
+ 瀏覽(&B)
+ 確定(&O)
+ 取消(&C)
+
+ 處理中:
+ 正在初始化...
+ 取消(&C)
+
+ 修復(&R)
+ 解除安裝(&U)
+ 關閉(&C)
+
+
+
+
+ 啟動(&L)
+ 必須重新啟動電腦,才能使用此軟體。
+ 重新啟動(&R)
+ 關閉(&C)
+
+
+
+
+ 有一個或多個問題導致安裝程式失敗。請解決問題,然後重試一次安裝。如需詳細資訊,請參閱<a href="#">記錄檔</a>。
+ 必須重新啟動電腦,才能完成軟體的復原。
+ 重新啟動(&R)
+ 關閉(&C)
+ 此作業系統不支援 [PRODUCT_NAME]。如需詳細資訊,請參閱 [LINK_PREREQ_PAGE]。
+ x86 作業系統不支援 [PRODUCT_NAME]。請使用對應的 x86 安裝程式來安裝。
+
+ 以下應用程式正在使用需要進行更新的檔案:
+ 關閉應用程式並嘗試重新啟動(&A)
+ 不關閉應用程式,需要重新啟動(&D)
+ 確定(&O)
+ 取消(&C)
+
+
+ 深入了解 .NET Core
+ 下列項目已安裝在 [DOTNETHOME]
+ - [BUNDLEMONIKER]
+
+ <A HREF="https://aka.ms/dotnet-docs">文件</A>
+ <A HREF="https://aka.ms/20-p2-rel-notes">版本資訊</A>
+ <A HREF="https://aka.ms/dotnet-tutorials">教學課程</A>
+ <A HREF="https://aka.ms/dotnet-cli-telemetry">.NET Core 遙測</A>
+ <A HREF="https://aka.ms/dev-privacy">隱私權聲明</A>
+ <A HREF="https://go.microsoft.com/fwlink/?LinkId=329770">.NET 程式庫 EULA</A>
+
+
diff --git a/pkg/windowsdesktop/sfx/theme/1029/bundle.wxl b/pkg/windowsdesktop/sfx/theme/1029/bundle.wxl
new file mode 100644
index 000000000..5897ba8e8
--- /dev/null
+++ b/pkg/windowsdesktop/sfx/theme/1029/bundle.wxl
@@ -0,0 +1,74 @@
+
+
+ Instalační program pro [WixBundleName]
+ [BUNDLEMONIKER]
+ Potřebujete jenom prostředí, textový editor a 10 minut času.
+
+Jste připraveni? Dejme se tedy do toho!
+ Opravdu chcete akci zrušit?
+ Předchozí verze
+
+ /install | /repair | /uninstall | /layout [adresář] – Nainstaluje, opraví, odinstaluje nebo
+ vytvoří úplnou místní kopii svazku v adresáři. Výchozí možností je instalace.
+
+/passive | /quiet – Zobrazí minimální uživatelské rozhraní bez výzev nebo nezobrazí žádné uživatelské rozhraní a
+ žádné výzvy. Výchozí možností je zobrazení uživatelského rozhraní a všech výzev.
+
+/norestart – potlačí všechny pokusy o restartování. Ve výchozím nastavení uživatelské rozhraní před restartováním zobrazí výzvu.
+/log log.txt – Uloží protokol do konkrétního souboru. Ve výchozím nastavení bude soubor protokolu vytvořen v adresáři %TEMP%.
+ &Zavřít
+ Souhl&asím s licenčními podmínkami.
+ M&ožnosti
+ &Instalovat
+ &Zavřít
+
+ Umístění instalace:
+ &Procházet
+ &OK
+ &Storno
+
+ Zpracování:
+ Inicializuje se...
+ &Storno
+
+ Op&ravit
+ O&dinstalovat
+ &Zavřít
+
+
+
+
+ &Spustit
+ Před použitím tohoto softwaru musíte restartovat počítač.
+ &Restartovat
+ &Zavřít
+
+
+
+
+ Instalace se nepovedla kvůli jednomu nebo více problémům. Opravte prosím tyto problémy a zkuste software nainstalovat znovu. Další informace najdete v <a href="#">souboru protokolu</a>.
+ Pro dokončení vrácení změn tohoto softwaru je potřeba restartovat počítač.
+ &Restartovat
+ &Zavřít
 + [PRODUCT_NAME] se v tomto operačním systému nepodporuje. Další informace: [LINK_PREREQ_PAGE]
+ [PRODUCT_NAME] se v operačních systémech pro platformu x86 nepodporuje. Použijte prosím k instalaci odpovídající instalační program pro platformu x86.
+
+ Následující aplikace používají soubory, které je potřeba aktualizovat:
+ Zavřete &aplikace a zkuste je restartovat.
+ A&plikace nezavírejte. Bude potřeba provést restart.
+ &OK
+ &Zrušit
+
+
+ Další informace o .NET Core
+ Do [DOTNETHOME] se nainstalovaly následující položky.
+ - [BUNDLEMONIKER]
+
+ <A HREF="https://aka.ms/dotnet-docs">Dokumentace</A>
+ <A HREF="https://aka.ms/20-p2-rel-notes">Zpráva k vydání verze</A>
+ <A HREF="https://aka.ms/dotnet-tutorials">Kurzy</A>
+ <A HREF="https://aka.ms/dotnet-cli-telemetry">Telemetrie pro platformu .NET Core</A>
+ <A HREF="https://aka.ms/dev-privacy">Prohlášení o zásadách ochrany osobních údajů</A>
+ <A HREF="https://go.microsoft.com/fwlink/?LinkId=329770">Smlouva EULA ke knihovně .NET</A>
+
+
diff --git a/pkg/windowsdesktop/sfx/theme/1031/bundle.wxl b/pkg/windowsdesktop/sfx/theme/1031/bundle.wxl
new file mode 100644
index 000000000..8cffcb514
--- /dev/null
+++ b/pkg/windowsdesktop/sfx/theme/1031/bundle.wxl
@@ -0,0 +1,74 @@
+
+
+ [WixBundleName]-Installer
+ [BUNDLEMONIKER]
+ Sie benötigen nur eine Shell, einen Text-Editor und 10 Minuten Zeit.
+
+Bereit? Los geht's!
+ Möchten Sie den Vorgang wirklich abbrechen?
+ Vorherige Version
+
+ /install | /repair | /uninstall | /layout [Verzeichnis] - installiert, repariert, deinstalliert oder
+ erstellt eine vollständige lokale Kopie des Bundles im Verzeichnis. Installieren ist die Standardeinstellung.
+
+/passive | /quiet - zeigt eine minimale Benutzeroberfläche ohne Eingabeaufforderungen oder keine
+ Benutzeroberfläche und keine Eingabeaufforderungen an. Standardmäßig werden die Benutzeroberfläche und alle Eingabeaufforderungen angezeigt.
+
+/norestart - Unterdrückt alle Neustartversuche. Standardmäßig fordert die Benutzeroberfläche zum Bestätigen eines Neustarts auf.
+/log log.txt - Erstellt das Protokoll in einer bestimmten Datei. Standardmäßig wird die Protokolldatei in %TEMP% erstellt.
+ S&chließen
+ Ich &stimme den Lizenzbedingungen zu.
+ &Optionen
+ &Installieren
+ S&chließen
+
+ Installationspfad:
+ &Durchsuchen
+ &OK
+ &Abbrechen
+
+ Wird verarbeitet:
+ Initialisierung...
+ &Abbrechen
+
+ &Reparieren
+ &Deinstallieren
+ S&chließen
+
+
+
+
+ &Starten
+ Sie müssen den Computer neu starten, bevor Sie die Software verwenden können.
+ &Neu starten
+ S&chließen
+
+
+
+
+ Setup ist aufgrund eines oder mehrerer Probleme fehlgeschlagen. Beheben Sie die Probleme, und führen Sie das Setup erneut aus. Weitere Informationen finden Sie in der <a href="#">Protokolldatei</a>.
+ Sie müssen den Computer neu starten, um das Zurücksetzen der Software abzuschließen.
+ &Neu starten
+ &Schließen
+ [PRODUCT_NAME] wird auf diesem Betriebssystem nicht unterstützt. Weitere Informationen finden Sie unter [LINK_PREREQ_PAGE].
+ [PRODUCT_NAME] wird auf x86-Betriebssystemen nicht unterstützt. Installieren Sie das entsprechende x86-Installationsprogramm.
+
+ Die folgenden Anwendungen verwenden Dateien, die aktualisiert werden müssen:
+ Schließen Sie die &Anwendungen, und versuchen Sie sie erneut zu starten.
+ &Anwendungen nicht schließen. Ein Neustart ist erforderlich.
+ &OK
+ &Abbrechen
+
+
+ Weitere Informationen zu .NET Core
+ Folgendes wurde unter [DOTNETHOME] installiert.
+ - [BUNDLEMONIKER]
+
+ <A HREF="https://aka.ms/dotnet-docs">Dokumentation</A>
+ <A HREF="https://aka.ms/20-p2-rel-notes">Versionshinweise</A>
+ <A HREF="https://aka.ms/dotnet-tutorials">Tutorials</A>
+ <A HREF="https://aka.ms/dotnet-cli-telemetry">.NET Core-Telemetrie</A>
+ <A HREF="https://aka.ms/dev-privacy">Datenschutzerklärung</A>
+ <A HREF="https://go.microsoft.com/fwlink/?LinkId=329770">Lizenzbedingungen für die .NET-Bibliothek</A>
+
+
diff --git a/pkg/windowsdesktop/sfx/theme/1033/bundle.wxl b/pkg/windowsdesktop/sfx/theme/1033/bundle.wxl
new file mode 100644
index 000000000..a8ef4b61b
--- /dev/null
+++ b/pkg/windowsdesktop/sfx/theme/1033/bundle.wxl
@@ -0,0 +1,74 @@
+
+
+ [WixBundleName] Installer
+ [BUNDLEMONIKER]
+ You just need a shell, a text editor and 10 minutes of your time.
+
+Ready? Set? Let's go!
+ Are you sure you want to cancel?
+ Previous version
+
+ /install | /repair | /uninstall | /layout [directory] - installs, repairs, uninstalls or
+ creates a complete local copy of the bundle in directory. Install is the default.
+
+/passive | /quiet - displays minimal UI with no prompts or displays no UI and
+ no prompts. By default UI and all prompts are displayed.
+
+/norestart - suppress any attempts to restart. By default UI will prompt before restart.
+/log log.txt - logs to a specific file. By default a log file is created in %TEMP%.
+ &Close
+ I &agree to the license terms and conditions
+ &Options
+ &Install
+ &Close
+
+ Install location:
+ &Browse
+ &OK
+ &Cancel
+
+ Processing:
+ Initializing...
+ &Cancel
+
+ &Repair
+ &Uninstall
+ &Close
+
+
+
+
+ &Launch
+ You must restart your computer before you can use the software.
+ &Restart
+ &Close
+
+
+
+
+ One or more issues caused the setup to fail. Please fix the issues and then retry setup. For more information see the <a href="#">log file</a>.
+ You must restart your computer to complete the rollback of the software.
+ &Restart
+ &Close
+ The [PRODUCT_NAME] is not supported on this operating system. For more information, see [LINK_PREREQ_PAGE].
+ The [PRODUCT_NAME] isn't supported on x86 operating systems. Please install using the corresponding x86 installer.
+
+ The following applications are using files that need to be updated:
+ Close the &applications and attempt to restart them.
+ &Do not close applications. A reboot will be required.
+ &OK
+ &Cancel
+
+
+ Learn more about .NET Core
+ The following was installed at [DOTNETHOME]
+ - [BUNDLEMONIKER]
+
+ <A HREF="https://aka.ms/dotnet-docs">Documentation</A>
+ <A HREF="https://aka.ms/20-p2-rel-notes">Release Notes</A>
+ <A HREF="https://aka.ms/dotnet-tutorials">Tutorials</A>
+ <A HREF="https://aka.ms/dotnet-cli-telemetry">.NET Core Telemetry</A>
+ <A HREF="https://aka.ms/dev-privacy">Privacy Statement</A>
+ <A HREF="https://go.microsoft.com/fwlink/?LinkId=329770">.NET Library EULA</A>
+
+
diff --git a/pkg/windowsdesktop/sfx/theme/1036/bundle.wxl b/pkg/windowsdesktop/sfx/theme/1036/bundle.wxl
new file mode 100644
index 000000000..f33778986
--- /dev/null
+++ b/pkg/windowsdesktop/sfx/theme/1036/bundle.wxl
@@ -0,0 +1,74 @@
+
+
+ Programme d'installation de [WixBundleName]
+ [BUNDLEMONIKER]
+ Vous avez juste besoin d'un interpréteur de commandes, d'un éditeur de texte et de 10 minutes.
+
+À vos marques ? Prêt ? Partez !
+ Voulez-vous vraiment annuler ?
+ Version précédente
+
+ /install | /repair | /uninstall | /layout [répertoire] - installe, répare, désinstalle ou
+ crée une copie locale complète du bundle dans le répertoire. Install est l'option par défaut.
+
+/passive | /quiet - affiche une interface utilisateur minimale, sans invite, ou n'affiche
+ ni interface utilisateur, ni invite. Par défaut, l'interface utilisateur et toutes les invites sont affichées.
+
+/norestart - supprime toutes les tentatives de redémarrage. Par défaut, l'interface utilisateur affiche une invite avant le redémarrage.
+/log log.txt - enregistre les informations dans un fichier spécifique. Par défaut, un fichier journal est créé dans %TEMP%.
+ &Fermer
+ J'&accepte les conditions générales de la licence
+ &Options
+ &Installer
+ &Fermer
+
+ Emplacement de l'installation :
+ &Parcourir
+ &OK
+ &Annuler
+
+ En cours :
+ Initialisation...
+ &Annuler
+
+ &Réparer
+ &Désinstaller
+ &Fermer
+
+
+
+
+ &Démarrer
+ Vous devez redémarrer votre ordinateur avant de pouvoir utiliser le logiciel.
+ &Redémarrer
+ &Fermer
+
+
+
+
+ Un ou plusieurs problèmes sont à l'origine de l'échec de l'installation. Corrigez ces problèmes, puis recommencez l'installation. Pour plus d'informations, voir le <a href="#">fichier journal</a>.
+ Vous devez redémarrer votre ordinateur pour terminer l'opération de restauration du logiciel.
+ &Redémarrer
+ &Fermer
+ [PRODUCT_NAME] n'est pas pris en charge sur ce système d'exploitation. Pour plus d'informations, consultez [LINK_PREREQ_PAGE].
+ [PRODUCT_NAME] n'est pas pris en charge sur les systèmes d'exploitation x86. Effectuez l'installation à l'aide du programme d'installation x86 correspondant.
+
+ Les applications suivantes utilisent des fichiers nécessitant une mise à jour :
+ &Fermez les applications, puis essayez de les redémarrer.
+ &Ne pas fermer les applications. Un redémarrage sera nécessaire.
+ &OK
+ &Annuler
+
+
+ En savoir plus sur .NET Core
+ L'élément suivant a été installé sur [DOTNETHOME]
+ - [BUNDLEMONIKER]
+
+ <A HREF="https://aka.ms/dotnet-docs">Documentation</A>
+ <A HREF="https://aka.ms/20-p2-rel-notes">Notes de publication</A>
+ <A HREF="https://aka.ms/dotnet-tutorials">Tutoriels</A>
+ <A HREF="https://aka.ms/dotnet-cli-telemetry">Télémétrie .NET Core</A>
+ <A HREF="https://aka.ms/dev-privacy">Déclaration de confidentialité</A>
+ <A HREF="https://go.microsoft.com/fwlink/?LinkId=329770">CLUF de la bibliothèque .NET</A>
+
+
diff --git a/pkg/windowsdesktop/sfx/theme/1040/bundle.wxl b/pkg/windowsdesktop/sfx/theme/1040/bundle.wxl
new file mode 100644
index 000000000..a8ca8adaf
--- /dev/null
+++ b/pkg/windowsdesktop/sfx/theme/1040/bundle.wxl
@@ -0,0 +1,74 @@
+
+
+ Programma di installazione di [WixBundleName]
+ [BUNDLEMONIKER]
+ Bastano solo una shell, un editor di testo e 10 minuti di tempo.
+
+Pronti per iniziare?
+ Annullare?
+ Versione precedente
+
+ /install | /repair | /uninstall | /layout [directory] - installa, ripara, disinstalla o
+ crea una copia locale completa del bundle nella directory. L'opzione predefinita è install.
+
+/passive | /quiet - visualizza un'interfaccia utente minima senza prompt oppure non visualizza alcuna interfaccia utente
+ né prompt. Per impostazione predefinita, viene visualizzata l'intera interfaccia utente e tutti i prompt.
+
+/norestart - annulla qualsiasi tentativo di riavvio. Per impostazione predefinita, l'interfaccia utente visualizza una richiesta prima del riavvio.
+/log log.txt - registra il log in un file specifico. Per impostazione predefinita, viene creato un file di log in %TEMP%.
+ &Chiudi
+ &Accetto i termini e le condizioni di licenza
+ &Opzioni
+ &Installa
+ &Chiudi
+
+ Percorso di installazione:
+ &Sfoglia
+ &OK
+ &Annulla
+
+ Elaborazione di:
+ Inizializzazione in corso...
+ &Annulla
+
+ &Ripristina
+ &Disinstalla
+ &Chiudi
+
+
+
+
+ &Avvia
+ Per poter usare il software, è necessario riavviare il computer.
+ &Riavvia
+ &Chiudi
+
+
+
+
+ L'installazione non è riuscita a causa di uno o più problemi. Risolvere i problemi e ripetere l'installazione. Per altre informazioni, vedere il <a href="#">file di log</a>.
+ Per completare il rollback del software, è necessario riavviare il computer.
+ &Riavvia
+ &Chiudi
+ [PRODUCT_NAME] non è supportato in questo sistema operativo. Per altre informazioni, vedere [LINK_PREREQ_PAGE].
+ [PRODUCT_NAME] non è supportato in sistemi operativi x86. Eseguire l'installazione usando il programma di installazione x86 corrispondente.
+
+ Le applicazioni seguenti usano file che necessitano di aggiornamento:
+ Chiudere le &applicazioni e provare a riavviarle.
+ &Non chiudere le applicazioni; sarà necessario riavviare il sistema
+ &OK
+ &Annulla
+
+
+ Altre informazioni su .NET Core
+ I componenti seguenti sono stati installati in [DOTNETHOME]
+ - [BUNDLEMONIKER]
+
+ <A HREF="https://aka.ms/dotnet-docs">Documentazione</A>
+ <A HREF="https://aka.ms/20-p2-rel-notes">Note sulla versione</A>
+ <A HREF="https://aka.ms/dotnet-tutorials">Esercitazioni</A>
+ <A HREF="https://aka.ms/dotnet-cli-telemetry">Telemetria di .NET Core</A>
+ <A HREF="https://aka.ms/dev-privacy">Informativa sulla privacy</A>
+ <A HREF="https://go.microsoft.com/fwlink/?LinkId=329770">Condizioni di licenza della libreria .NET</A>
+
+
diff --git a/pkg/windowsdesktop/sfx/theme/1041/bundle.wxl b/pkg/windowsdesktop/sfx/theme/1041/bundle.wxl
new file mode 100644
index 000000000..17cd24efe
--- /dev/null
+++ b/pkg/windowsdesktop/sfx/theme/1041/bundle.wxl
@@ -0,0 +1,75 @@
+
+
+ [WixBundleName] インストーラー
+ [BUNDLEMONIKER]
+ 必要なのは、シェル、テキスト エディター、それに時間が 10 分のみです。
+
+では、始めましょう。
+ 取り消しますか?
+ 以前のバージョン
+
+ /install | /repair | /uninstall | /layout [directory] - バンドルの完全なローカル コピーに対する
+ ディレクトリへのインストール、修復、ディレクトリからのアンインストール、またはディレクトリ内への
+ 作成を行います。既定の設定はインストールです。
+
+/passive | /quiet - 最小限の UI だけを表示しプロンプトは表示しない、または UI もプロンプトも
+ 表示しません。既定では UI とすべてのプロンプトが表示されます。
+
+/norestart - 再起動を抑制します。既定では再起動前に確認メッセージが表示されます。
+/log log.txt - 特定のファイルに記録します。ログ ファイルは既定では %TEMP% に作成されます。
+ 閉じる(&C)
+ ライセンス条項および使用条件に同意する(&A)
+ オプション(&O)
+ インストール(&I)
+ 閉じる(&C)
+
+ インストール場所:
+ 参照(&B)
+ OK(&O)
+ キャンセル(&C)
+
+ 処理中:
+ 初期化しています...
+ キャンセル(&C)
+
+ 修復(&R)
+ アンインストール(&U)
+ 閉じる(&C)
+
+
+
+
+ 起動(&L)
+ ソフトウェアを使用する前にコンピューターを再起動する必要があります。
+ 再起動(&R)
+ 閉じる(&C)
+
+
+
+
+ 1 つまたは複数の問題により、セットアップが失敗しました。問題を解決してからセットアップを再試行してください。詳細については、<a href="#">ログ ファイル</a>を参照してください。
+ ソフトウェアのロールバックを完了するには、コンピューターを再起動する必要があります。
+ 再起動(&R)
+ 閉じる(&C)
+ [PRODUCT_NAME] は、このオペレーティング システムではサポートされていません。詳細については、[LINK_PREREQ_PAGE] を参照してください。
+ x86 オペレーティング システムでは、[PRODUCT_NAME] はサポートされていません。対応する x86 インストーラーを使用してインストールしてください。
+
+ 次のアプリケーションは、更新の必要があるファイルを使用しています:
+ アプリケーションを閉じて再起動を試みる。(&A)
+ アプリケーションを終了させない (コンピューターの再起動が必要になります)(&D)
+ OK(&O)
+ キャンセル(&C)
+
+
 + .NET Core の詳細
+ [DOTNETHOME] に以下がインストールされました
+ - [BUNDLEMONIKER]
+
+ <A HREF="https://aka.ms/dotnet-docs">ドキュメント</A>
+ <A HREF="https://aka.ms/20-p2-rel-notes">リリース ノート</A>
+ <A HREF="https://aka.ms/dotnet-tutorials">チュートリアル</A>
+ <A HREF="https://aka.ms/dotnet-cli-telemetry">.NET Core テレメトリ</A>
+ <A HREF="https://aka.ms/dev-privacy">プライバシーに関する声明</A>
+ <A HREF="https://go.microsoft.com/fwlink/?LinkId=329770">.NET ライブラリのライセンス条項</A>
+
+
diff --git a/pkg/windowsdesktop/sfx/theme/1042/bundle.wxl b/pkg/windowsdesktop/sfx/theme/1042/bundle.wxl
new file mode 100644
index 000000000..7e452aa4f
--- /dev/null
+++ b/pkg/windowsdesktop/sfx/theme/1042/bundle.wxl
@@ -0,0 +1,74 @@
+
+
+ [WixBundleName] 설치 관리자
+ [BUNDLEMONIKER]
+ 셸, 텍스트 편집기, 10분의 시간만 있으면 됩니다.
+
+준비되셨나요? 시작합니다!
+ 취소하시겠습니까?
+ 이전 버전
+
+ /install | /repair | /uninstall | /layout [directory] - 디렉터리에 번들의 전체 로컬 복사본을 설치, 복구, 제거 또는
+ 작성합니다. 설치가 기본값입니다.
+
+/passive | /quiet - 프롬프트 없이 최소 UI를 표시하거나 UI 및
+ 프롬프트를 표시하지 않습니다. 기본적으로 UI와 모든 프롬프트가 표시됩니다.
+
+/norestart - 다시 시작하지 않게 합니다. 기본적으로 UI에서는 다시 시작하기 전에 묻는 메시지를 표시합니다.
+/log log.txt - 특정 파일에 기록합니다. 기본적으로 로그 파일은 %TEMP%에 만들어집니다.
+ 닫기(&C)
+ 동의함(&A)
+ 옵션(&O)
+ 설치(&I)
+ 닫기(&C)
+
+ 설치 위치:
+ 찾아보기(&B)
+ 확인(&O)
+ 취소(&C)
+
+ 처리 중:
+ 초기화 중...
+ 취소(&C)
+
+ 복구(&R)
+ 제거(&U)
+ 닫기(&C)
+
+
+
+
+ 시작(&L)
+ 소프트웨어를 사용하려면 먼저 컴퓨터를 다시 시작해야 합니다.
+ 다시 시작(&R)
+ 닫기(&C)
+
+
+
+
+ 하나 이상의 문제가 발생하여 설치하지 못했습니다. 문제를 해결한 다음 설치를 다시 시도하십시오. 자세한 내용은 <a href="#">로그 파일</a>을 참조하십시오.
+ 소프트웨어 롤백을 완료하려면 컴퓨터를 다시 시작해야 합니다.
+ 다시 시작(&R)
+ 닫기(&C)
+ 이 운영 체제에서는 [PRODUCT_NAME]이(가) 지원되지 않습니다. 자세한 내용은 [LINK_PREREQ_PAGE]을(를) 참조하세요.
+ x86 운영 체제에서는 [PRODUCT_NAME]이(가) 지원되지 않습니다. 해당 x86 설치 관리자를 사용하여 설치하세요.
+
+ 다음의 응용 프로그램이 업데이트해야 할 파일을 사용 중입니다.
+ 응용 프로그램을 닫고 다시 시작합니다(&A).
+ 애플리케이션을 닫지 않습니다(&D). 다시 부팅해야 합니다.
+ 확인(&O)
+ 취소(&C)
+
+
+ .NET Core에 대한 자세한 정보
+ 다음이 [DOTNETHOME]에 설치되었습니다.
+ - [BUNDLEMONIKER]
+
+ <A HREF="https://aka.ms/dotnet-docs">설명서</A>
+ <A HREF="https://aka.ms/20-p2-rel-notes">릴리스 정보</A>
+ <A HREF="https://aka.ms/dotnet-tutorials">자습서</A>
+ <A HREF="https://aka.ms/dotnet-cli-telemetry">.NET Core 원격 분석</A>
+ <A HREF="https://aka.ms/dev-privacy">개인정보처리방침</A>
+ <A HREF="https://go.microsoft.com/fwlink/?LinkId=329770">.NET Library EULA</A>
+
+
diff --git a/pkg/windowsdesktop/sfx/theme/1045/bundle.wxl b/pkg/windowsdesktop/sfx/theme/1045/bundle.wxl
new file mode 100644
index 000000000..10015430c
--- /dev/null
+++ b/pkg/windowsdesktop/sfx/theme/1045/bundle.wxl
@@ -0,0 +1,74 @@
+
+
+ Instalator pakietu [WixBundleName]
+ [BUNDLEMONIKER]
+ Potrzebujemy tylko powłoki, edytora tekstu i 10 minut czasu.
+
+Wszystko gotowe? Zaczynamy!
+ Czy na pewno chcesz anulować?
+ Poprzednia wersja
+
+ /install | /repair | /uninstall | /layout [katalog] - Instaluje, naprawia, odinstalowuje
+ lub tworzy pełną lokalną kopię pakietu w katalogu. Domyślnie jest używany przełącznik install.
+
+/passive | /quiet - Wyświetla ograniczony interfejs użytkownika bez monitów albo nie wyświetla ani interfejsu użytkownika,
+ ani monitów. Domyślnie jest wyświetlany interfejs użytkownika oraz wszystkie monity.
+
+/norestart - Pomija próby ponownego uruchomienia. Domyślnie interfejs użytkownika wyświetla monit przed ponownym uruchomieniem.
+/log log.txt - Tworzy dziennik w określonym pliku. Domyślnie plik dziennika jest tworzony w katalogu %TEMP%.
+ &Zamknij
+ &Zgadzam się z postanowieniami licencyjnymi
+ &Opcje
+ &Zainstaluj
+ &Zamknij
+
+ Lokalizacja instalacji:
+ &Przeglądaj
+ &OK
+ &Anuluj
+
+ Przetwarzanie:
+ Trwa inicjowanie...
+ &Anuluj
+
+ &Napraw
+ &Odinstaluj
+ &Zamknij
+
+
+
+
+ &Uruchom
+ Aby móc korzystać z oprogramowania, musisz uruchomić ponownie komputer.
+ &Uruchom ponownie
+ &Zamknij
+
+
+
+
+ Co najmniej jeden problem spowodował niepowodzenie instalacji. Rozwiąż problemy, a następnie ponów próbę instalacji. Aby uzyskać więcej informacji, zobacz <a href="#">plik dziennika</a>.
+ Aby ukończyć wycofywanie oprogramowania, musisz uruchomić ponownie komputer.
+ &Uruchom ponownie
+ &Zamknij
+ Produkt [PRODUCT_NAME] nie jest obsługiwany w tym systemie operacyjnym. Aby uzyskać więcej informacji, zobacz [LINK_PREREQ_PAGE].
+ Produkt [PRODUCT_NAME] nie jest obsługiwany w systemach operacyjnych x86. Przeprowadź instalację przy użyciu odpowiedniego instalatora x86.
+
+ Następujące aplikacje korzystają z plików, które muszą zostać zaktualizowane:
+ Zamknij &aplikacje i spróbuj je ponownie uruchomić.
+ &Nie zamykaj aplikacji. Będzie konieczne ponowne uruchomienie.
+ &OK
+ &Anuluj
+
+
+ Dowiedz się więcej o platformie .NET Core
+ Następujące elementy zainstalowano w [DOTNETHOME]
+ - [BUNDLEMONIKER]
+
+ <A HREF="https://aka.ms/dotnet-docs">Dokumentacja</A>
+ <A HREF="https://aka.ms/20-p2-rel-notes">Informacje o wersji</A>
+ <A HREF="https://aka.ms/dotnet-tutorials">Samouczki</A>
+ <A HREF="https://aka.ms/dotnet-cli-telemetry">Telemetria programu .NET Core</A>
+ <A HREF="https://aka.ms/dev-privacy">Zasady zachowania poufności informacji</A>
+ <A HREF="https://go.microsoft.com/fwlink/?LinkId=329770">Umowa licencyjna użytkownika oprogramowania biblioteki .NET</A>
+
+
diff --git a/pkg/windowsdesktop/sfx/theme/1046/bundle.wxl b/pkg/windowsdesktop/sfx/theme/1046/bundle.wxl
new file mode 100644
index 000000000..ff235b6d4
--- /dev/null
+++ b/pkg/windowsdesktop/sfx/theme/1046/bundle.wxl
@@ -0,0 +1,74 @@
+
+
+ Instalador do [WixBundleName]
+ [BUNDLEMONIKER]
+ Você só precisa de um shell, um editor de texto e 10 minutos de seu tempo.
+
+Tudo pronto? Então, vamos nessa!
+ Tem certeza de que deseja cancelar?
+ Versão anterior
+
+ /install | /repair | /uninstall | /layout [diretório] - instala, repara, desinstala ou
+ cria uma cópia local completa do pacote no diretório. Install é o padrão
+
+/passive | /quiet - exibe a interface do usuário mínima sem nenhum prompt ou não exibe nenhuma interface do usuário e
+ nenhum prompt. Por padrão, a interface do usuário e todos os prompts são exibidos.
+
+/norestart - suprime qualquer tentativa de reiniciar. Por padrão, a interface do usuário perguntará antes de reiniciar.
+/log log.txt - registra em um arquivo específico. Por padrão, um arquivo de log é criado em %TEMP%.
+ &Fechar
+ &Concordo com os termos e condições da licença
+ &Opções
+ &Instalar
+ &Fechar
+
+ Local de instalação:
+ &Navegar
+ &OK
+ &Cancelar
+
+ Processando:
+ Inicializando...
+ &Cancelar
+
+ &Reparar
+ &Desinstalar
+ &Fechar
+
+
+
+
+ &Iniciar
+ Você deve reiniciar seu computador antes de usar o software.
+ &Reiniciar
+ &Fechar
+
+
+
+
+ Um ou mais problemas causaram falha na instalação. Corrija-os e tente instalar novamente. Para obter mais informações, consulte o <a href="#">arquivo de log</a>.
+ Você deve reiniciar seu computador para concluir a reversão do software.
+ &Reiniciar
+ &Fechar
+ Não há suporte para o [PRODUCT_NAME] neste sistema operacional. Para obter mais informações, confira [LINK_PREREQ_PAGE].
+ O [PRODUCT_NAME] não tem suporte em sistemas operacionais x86. Instale usando o instalador x86 correspondente.
+
+ Os aplicativos a seguir estão usando arquivos que precisam ser atualizados:
+ Feche os &aplicativos e tente reiniciá-los.
+ &Não feche os aplicativos. Será necessária uma reinicialização.
+ &OK
+ &Cancelar
+
+
+ Saiba mais sobre o .NET Core
+ O seguinte foi instalado em [DOTNETHOME]
+ - [BUNDLEMONIKER]
+
+ <A HREF="https://aka.ms/dotnet-docs">Documentação</A>
+ <A HREF="https://aka.ms/20-p2-rel-notes">Notas sobre a Versão</A>
+ <A HREF="https://aka.ms/dotnet-tutorials">Tutoriais</A>
+ <A HREF="https://aka.ms/dotnet-cli-telemetry">Telemetria do .NET Core</A>
+ <A HREF="https://aka.ms/dev-privacy">Política de Privacidade</A>
+ <A HREF="https://go.microsoft.com/fwlink/?LinkId=329770">Termos de licença da Biblioteca do .NET</A>
+
+
diff --git a/pkg/windowsdesktop/sfx/theme/1049/bundle.wxl b/pkg/windowsdesktop/sfx/theme/1049/bundle.wxl
new file mode 100644
index 000000000..d963403c8
--- /dev/null
+++ b/pkg/windowsdesktop/sfx/theme/1049/bundle.wxl
@@ -0,0 +1,74 @@
+
+
+ Установщик [WixBundleName]
+ [BUNDLEMONIKER]
+ Вам требуется только оболочка, текстовый редактор и 10 минут времени.
+
+Готовы? Тогда начинаем!
+ Вы действительно хотите отменить?
+ Предыдущая версия
+
+ /install | /repair | /uninstall | /layout [каталог] — установка, восстановление, удаление или
+ создание полной локальной копии пакета в каталоге. По умолчанию — установка.
+
+/passive | /quiet — отображение минимального пользовательского интерфейса без запросов или работа без пользовательского интерфейса и
+ без запросов. По умолчанию отображаются пользовательский интерфейс и все запросы.
+
+/norestart — отключение всех попыток перезагрузки. По умолчанию в пользовательском интерфейсе перед перезагрузкой отображается запрос.
+/log log.txt — запись журнала в указанный файл. По умолчанию файл журнала создается в папке %TEMP%.
+ &Закрыть
+ Я &принимаю условия лицензии
+ &Параметры
+ &Установить
+ &Закрыть
+
+ Расположение установки:
+ &Обзор
+ &ОК
+ Отм&ена
+
+ Обработка:
+ Идет инициализация...
+ Отм&ена
+
+ &Исправить
+ &Удалить
+ &Закрыть
+
+
+
+
+ &Запустить
+ Перед использованием программного обеспечения необходимо перезапустить компьютер.
+ &Перезапустить
+ &Закрыть
+
+
+
+
+ Одна или несколько проблем вызывали сбой программы установки. Исправьте эти проблемы и попробуйте повторить установку. Дополнительные сведения см. в <a href="#">файле журнала</a>.
+ Необходимо перезагрузить компьютер, чтобы завершить откат программного обеспечения.
+ &Перезапустить
+ З&акрыть
+ Продукт [PRODUCT_NAME] не поддерживается в этой операционной системе. Дополнительные сведения: [LINK_PREREQ_PAGE].
+ Продукт [PRODUCT_NAME] не поддерживается в операционных системах x86. Установите с помощью соответствующего установщика x86.
+
+ Следующие приложения используют файлы, которые следует обновить:
+ Закрыть &приложения и попытаться перезапустить их.
+ &Не закрывать приложения. Потребуется перезагрузка.
+ О&К
+ &Отмена
+
+
+ Дополнительные сведения о .NET Core
+ Следующее было установлено в [DOTNETHOME]
+ - [BUNDLEMONIKER]
+
+ <A HREF="https://aka.ms/dotnet-docs">Документация</A>
+ <A HREF="https://aka.ms/20-p2-rel-notes">Заметки о выпуске</A>
+ <A HREF="https://aka.ms/dotnet-tutorials">Руководства</A>
+ <A HREF="https://aka.ms/dotnet-cli-telemetry">Телеметрия .NET Core</A>
+ <A HREF="https://aka.ms/dev-privacy">Заявление о конфиденциальности</A>
+ <A HREF="https://go.microsoft.com/fwlink/?LinkId=329770">Лицензионное соглашение для библиотеки .NET</A>
+
+
diff --git a/pkg/windowsdesktop/sfx/theme/1055/bundle.wxl b/pkg/windowsdesktop/sfx/theme/1055/bundle.wxl
new file mode 100644
index 000000000..cbe4da8a3
--- /dev/null
+++ b/pkg/windowsdesktop/sfx/theme/1055/bundle.wxl
@@ -0,0 +1,74 @@
+
+
+ [WixBundleName] Yükleyicisi
+ [BUNDLEMONIKER]
+ Yalnızca bir kabuğa, bir metin düzenleyicisine ve 10 dakikalık bir zamana ihtiyacınız var.
+
+Hazır mısınız? Haydi başlayalım!
+ İptal etmek istediğinizden emin misiniz?
+ Önceki sürüm
+
+ /install | /repair | /uninstall | /layout [dizin] - yükler, onarır, kaldırır ya da
+ dizindeki paketin tam bir yerel kopyasını oluşturur. Varsayılan install değeridir.
+
+/passive | /quiet - en az düzeyde istemsiz UI gösterir ya da hiç UI göstermez ve
+ istem yoktur. Varsayılan olarak UI ve tüm istemler görüntülenir.
+
+/norestart - yeniden başlama denemelerini engeller. Varsayılan olarak UI yeniden başlatılmadan önce sorar.
+/log log.txt - belirli bir günlük dosyası tutar. Varsayılan olarak %TEMP% içinde bir günlük dosyası oluşturulur.
+ &Kapat
+ Lisans &hüküm ve koşullarını kabul ediyorum
+ &Seçenekler
+ &Yükle
+ &Kapat
+
+ Yükleme konumu:
+ &Gözat
+ &Tamam
+ İ&ptal
+
+ İşleniyor:
+ Başlatılıyor...
+ İ&ptal
+
+ &Onar
+ K&aldır
+ &Kapat
+
+
+
+
+ &Başlat
+ Yazılımı kullanabilmek için bilgisayarınızı yeniden başlatmanız gerekiyor.
+ Yeniden &Başlat
+ &Kapat
+
+
+
+
+ En az bir sorun nedeniyle kurulum başarısız oldu. Lütfen bu sorunları düzeltin ve kurulumu yeniden deneyin. Daha fazla bilgi için <a href="#">günlük dosyasına</a> bakın.
+ Yazılımın geri alınmasını tamamlamak için bilgisayarınızı yeniden başlatmanız gerekiyor.
+ Yeniden &Başlat
+ &Kapat
+ [PRODUCT_NAME] bu işletim sisteminde desteklenmiyor. Daha fazla bilgi için bkz. [LINK_PREREQ_PAGE].
+ [PRODUCT_NAME], x86 işletim sistemlerinde desteklenmiyor. Lütfen karşılık gelen x86 yükleyicisini kullanarak yükleyin.
+
+ Şu uygulamalar güncelleştirilmesi gereken dosyaları kullanıyor:
+ &Uygulamaları kapatın ve yeniden başlatmayı deneyin.
+ &Uygulamaları kapatmayın. Sistemi yeniden başlatmanız gerekir.
+ &Tamam
+ &İptal
+
+
+ .NET Core hakkında daha fazla bilgi edinin
+ Aşağıdakiler [DOTNETHOME] konumunda yüklendi
+ - [BUNDLEMONIKER]
+
+ <A HREF="https://aka.ms/dotnet-docs">Belgeler</A>
+ <A HREF="https://aka.ms/20-p2-rel-notes">Sürüm Notları</A>
+ <A HREF="https://aka.ms/dotnet-tutorials">Öğreticiler</A>
+ <A HREF="https://aka.ms/dotnet-cli-telemetry">.NET Core Telemetrisi</A>
+ <A HREF="https://aka.ms/dev-privacy">Gizlilik Bildirimi</A>
+ <A HREF="https://go.microsoft.com/fwlink/?LinkId=329770">.NET Kitaplığı EULA</A>
+
+
diff --git a/pkg/windowsdesktop/sfx/theme/2052/bundle.wxl b/pkg/windowsdesktop/sfx/theme/2052/bundle.wxl
new file mode 100644
index 000000000..8c61fb7ee
--- /dev/null
+++ b/pkg/windowsdesktop/sfx/theme/2052/bundle.wxl
@@ -0,0 +1,74 @@
+
+
+ [WixBundleName] 安装程序
+ [BUNDLEMONIKER]
+ 你只需要一个 shell、一个文本编辑器,还需花 10 分钟即可。
+
+准备好了吗? 要设置? 让我们开始吧!
+ 是否确实要取消?
+ 上一版本
+
+ /install | /repair | /uninstall | /layout [目录] - 安装、修复、卸载
+ 目录中的安装包或创建其完整本地副本。Install 为默认选择。
+
+/passive | /quiet - 显示最少的 UI 且无提示,或不显示 UI 且
+ 无提示。默认显示 UI 及全部提示。
+
+/norestart - 禁止任何重新启动。默认在重新启动前显示提示 UI。
+/log log.txt - 向特定文件写入日志。默认在 %TEMP% 中创建日志文件。
+ 关闭(&C)
+ 我同意许可条款和条件(&A)
+ 选项(&O)
+ 安装(&I)
+ 关闭(&C)
+
+ 安装位置:
+ 浏览(&B)
+ 确定(&O)
+ 取消(&C)
+
+ 正在处理:
+ 正在初始化…
+ 取消(&C)
+
+ 修复(&R)
+ 卸载(&U)
+ 关闭(&C)
+
+
+
+
+ 启动(&L)
+ 在使用此软件之前,您必须重新启动计算机。
+ 重新启动(&R)
+ 关闭(&C)
+
+
+
+
+ 一个或多个问题导致了安装失败。请修复这些问题,然后重试安装。有关详细信息,请参阅<a href="#">日志文件</a>。
+ 必须重新启动计算机才能完成软件回退。
+ 重新启动(&R)
+ 关闭(&C)
+ 此操作系统不支持 [PRODUCT_NAME]。有关详细信息,请参阅[LINK_PREREQ_PAGE]。
+ x86 操作系统不支持该 [PRODUCT_NAME]。请使用相应的 x86 安装程序进行安装。
+
+ 以下应用程序正在使用的文件需要更新:
+ 关闭应用程序并尝试重启(&A)。
+ 不关闭应用程序(&D)。需要重启。
+ 确定(&O)
+ 取消(&C)
+
+
+ 了解有关 .NET Core 的详细信息
+ 以下项已安装到 [DOTNETHOME]
+ - [BUNDLEMONIKER]
+
+ <A HREF="https://aka.ms/dotnet-docs">文档</A>
+ <A HREF="https://aka.ms/20-p2-rel-notes">发行说明</A>
+ <A HREF="https://aka.ms/dotnet-tutorials">教程</A>
+ <A HREF="https://aka.ms/dotnet-cli-telemetry">.NET Core 遥测</A>
+ <A HREF="https://aka.ms/dev-privacy">隐私声明</A>
+ <A HREF="https://go.microsoft.com/fwlink/?LinkId=329770">.NET 库 EULA</A>
+
+
diff --git a/pkg/windowsdesktop/sfx/theme/3082/bundle.wxl b/pkg/windowsdesktop/sfx/theme/3082/bundle.wxl
new file mode 100644
index 000000000..f66fa3b59
--- /dev/null
+++ b/pkg/windowsdesktop/sfx/theme/3082/bundle.wxl
@@ -0,0 +1,74 @@
+
+
+ Instalador de [WixBundleName]
+ [BUNDLEMONIKER]
+ Solo necesita un shell, un editor de texto y 10 minutos.
+
+¿Preparados? ¿Listos? ¡Ya!
+ ¿Está seguro de que desea cancelar?
+ Versión anterior
+
+ /install | /repair | /uninstall | /layout [directory] - instala, repara, desinstala o
+ crea una copia local completa del paquete en el directorio. Install es la opción predeterminada.
+
+/passive | /quiet - muestra una IU mínima sin peticiones, o bien no muestra la IU
+ ni las peticiones. De forma predeterminada, se muestran la IU y todas las peticiones.
+
+/norestart - suprime los intentos de reiniciar. De forma predeterminada, la IU preguntará antes de reiniciar.
+/log log.txt - se registra en un archivo específico. De forma predeterminada, se crea un archivo de registro en %TEMP%.
+ &Cerrar
+ &Acepto los términos y condiciones de licencia
+ &Opciones
+ &Instalar
+ &Cerrar
+
+ Ubicación de instalación:
+ E&xaminar
+ &Aceptar
+ &Cancelar
+
+ Procesando:
+ Inicializando...
+ &Cancelar
+
+ &Reparar
+ &Desinstalar
+ &Cerrar
+
+
+
+
+ &Iniciar
+ Debe reiniciar el equipo para poder usar el software.
+ &Reiniciar
+ &Cerrar
+
+
+
+
+ Error de instalación debido a uno o varios problemas. Corrija los problemas e intente de nuevo la instalación. Para obtener más información, consulte el <a href="#">archivo de registro</a>.
+ Debe reiniciar el equipo para completar la reversión del software.
+ &Reiniciar
+ &Cerrar
+ [PRODUCT_NAME] no se admite en este sistema operativo. Para obtener más información, consulte [LINK_PREREQ_PAGE].
+ [PRODUCT_NAME] no es compatible con los sistemas operativos x86. Instálelo con el instalador x86 correspondiente.
+
+ Las siguientes aplicaciones usan archivos que se deben actualizar:
+ Cerrar las &aplicaciones e intentar reiniciarlas.
+ &No cerrar las aplicaciones. Será necesario un reinicio.
+ &Aceptar
+ &Cancelar
+
+
+ Más información sobre .NET Core
+ Lo siguiente se instaló en [DOTNETHOME]
+ - [BUNDLEMONIKER]
+
+ <A HREF="https://aka.ms/dotnet-docs">Documentación</A>
+ <A HREF="https://aka.ms/20-p2-rel-notes">Notas de la versión</A>
+ <A HREF="https://aka.ms/dotnet-tutorials">Tutoriales</A>
+ <A HREF="https://aka.ms/dotnet-cli-telemetry">Telemetría de .NET Core</A>
+ <A HREF="https://aka.ms/dev-privacy">Declaración de privacidad</A>
+ <A HREF="https://go.microsoft.com/fwlink/?LinkId=329770">CLUF de la biblioteca .NET</A>
+
+
diff --git a/pkg/windowsdesktop/src/windowsdesktop.depproj b/pkg/windowsdesktop/src/windowsdesktop.depproj
new file mode 100644
index 000000000..43e9a0b75
--- /dev/null
+++ b/pkg/windowsdesktop/src/windowsdesktop.depproj
@@ -0,0 +1,40 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/pkg/windowsdesktopRIDs.props b/pkg/windowsdesktopRIDs.props
new file mode 100644
index 000000000..f4a1d748c
--- /dev/null
+++ b/pkg/windowsdesktopRIDs.props
@@ -0,0 +1,19 @@
+
+
+
+
+ $(OutputRid)
+
+
+ false
+
+
+
+
+
+
+
+
diff --git a/publish/Directory.Build.props b/publish/Directory.Build.props
new file mode 100644
index 000000000..6548989b5
--- /dev/null
+++ b/publish/Directory.Build.props
@@ -0,0 +1,32 @@
+
+
+
+
+
+ $(NETCoreAppFramework)
+
+
+
+
+ false
+ true
+
+
+
+ https://dotnetcli.blob.core.windows.net/
+ WindowsDesktop
+
+ .sha512
+ $(ArtifactsDir)PackageDownload/
+
+
+ $(PublishBlobFeedUrl)
+ https://dotnetfeed.blob.core.windows.net/dotnet-core/index.json
+
+
+
+
+
+
+
+
diff --git a/publish/Directory.Build.targets b/publish/Directory.Build.targets
new file mode 100644
index 000000000..bb3927313
--- /dev/null
+++ b/publish/Directory.Build.targets
@@ -0,0 +1,75 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/publish/prepare-artifacts.proj b/publish/prepare-artifacts.proj
new file mode 100644
index 000000000..c99b6105f
--- /dev/null
+++ b/publish/prepare-artifacts.proj
@@ -0,0 +1,141 @@
+
+
+
+
+
+
+
+
+
+
+ $(ArtifactsObjDir)PreparedFileUpload\
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ $(PackagesUrl)
+ Manifest.xml
+ $(ArtifactsLogDir)AssetManifest/$(AssetManifestFilename)
+
+
+ $(ArtifactsObjDir)TempWorkingDir\$([System.Guid]::NewGuid())\
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ $(PackagesUrl)
+ Manifest_Installers.xml
+ $(ArtifactsLogDir)AssetManifest/$(AssetManifestFilename)
+
+
+ $(ArtifactsObjDir)TempWorkingDir\$([System.Guid]::NewGuid())\
+
+
+
+
+
+
+
+ assets/windowsdesktop/$(InstallersRelativePath)%(Filename)%(Extension)
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/publish/publish-blobs.proj b/publish/publish-blobs.proj
new file mode 100644
index 000000000..ed3b2d67d
--- /dev/null
+++ b/publish/publish-blobs.proj
@@ -0,0 +1,75 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/publish/publish-final.proj b/publish/publish-final.proj
new file mode 100644
index 000000000..e38656e4c
--- /dev/null
+++ b/publish/publish-final.proj
@@ -0,0 +1,83 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/signing/Directory.Build.props b/signing/Directory.Build.props
new file mode 100644
index 000000000..0054dfb04
--- /dev/null
+++ b/signing/Directory.Build.props
@@ -0,0 +1,12 @@
+
+
+
+
+
+ $(NETCoreAppFramework)
+
+
+ true
+
+
+
diff --git a/signing/Directory.Build.targets b/signing/Directory.Build.targets
new file mode 100644
index 000000000..0afd9438c
--- /dev/null
+++ b/signing/Directory.Build.targets
@@ -0,0 +1,35 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/signing/SignBinaries.proj b/signing/SignBinaries.proj
new file mode 100644
index 000000000..8b1f6811f
--- /dev/null
+++ b/signing/SignBinaries.proj
@@ -0,0 +1,7 @@
+
+
+
+
+
+
+
diff --git a/signing/SignBurnBundleFiles.proj b/signing/SignBurnBundleFiles.proj
new file mode 100644
index 000000000..55c7a8c58
--- /dev/null
+++ b/signing/SignBurnBundleFiles.proj
@@ -0,0 +1,11 @@
+
+
+
+
+
+
+
+
diff --git a/signing/SignBurnEngineFiles.proj b/signing/SignBurnEngineFiles.proj
new file mode 100644
index 000000000..382eb2023
--- /dev/null
+++ b/signing/SignBurnEngineFiles.proj
@@ -0,0 +1,13 @@
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/signing/SignFinalPackages.proj b/signing/SignFinalPackages.proj
new file mode 100644
index 000000000..35e3d8428
--- /dev/null
+++ b/signing/SignFinalPackages.proj
@@ -0,0 +1,2 @@
+
+
diff --git a/signing/SignMsiFiles.proj b/signing/SignMsiFiles.proj
new file mode 100644
index 000000000..55dd18dca
--- /dev/null
+++ b/signing/SignMsiFiles.proj
@@ -0,0 +1,19 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/src/windowsdesktop/Directory.Build.props b/src/windowsdesktop/Directory.Build.props
new file mode 100644
index 000000000..4b971c5b5
--- /dev/null
+++ b/src/windowsdesktop/Directory.Build.props
@@ -0,0 +1,15 @@
+
+
+
+
+ $(ProjectDir)src\test\
+ $(TestDir)Assets\
+ $(ObjDir)TestStabilizedPackages\
+ $(ObjDir)TestPackageCache\
+ $(ObjDir)TestNuGetConfig\NuGet.config
+ $(ObjDir)ExtraNupkgsForTestRestore\
+ $(TargetArchitecture)
+ netcoreapp3.0
+
+
+
diff --git a/src/windowsdesktop/Directory.Build.targets b/src/windowsdesktop/Directory.Build.targets
new file mode 100644
index 000000000..85407dcfb
--- /dev/null
+++ b/src/windowsdesktop/Directory.Build.targets
@@ -0,0 +1,74 @@
+
+
+
+
+
+ $([System.IO.Path]::DirectorySeparatorChar)
+ $([System.String]::Copy('$(TestsOutputDir)').Replace('/', '$(DirectorySeparatorChar)'))
+ $([System.String]::Copy('$(SystemPathTestsOutputDir)').Replace('\', '$(DirectorySeparatorChar)'))
+
+
+ $(NetCoreRoot)
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ $(_HostRid)
+ $(MSBuildProjectName)
+
+ $(ArtifactsDir)tests/$(ConfigurationGroup)/
+ $(TestsOutputRootDir)$(TestsOutputName)/
+
+
+
+
+
diff --git a/src/windowsdesktop/TestUtils/RepoDirectoriesProvider.cs b/src/windowsdesktop/TestUtils/RepoDirectoriesProvider.cs
new file mode 100644
index 000000000..f30695fd5
--- /dev/null
+++ b/src/windowsdesktop/TestUtils/RepoDirectoriesProvider.cs
@@ -0,0 +1,132 @@
+using System;
+using System.Collections;
+using System.Collections.Generic;
+using System.Collections.Immutable;
+using System.IO;
+using System.Linq;
+
+namespace Microsoft.DotNet.CoreSetup.Test
+{
+ public class RepoDirectoriesProvider
+ {
+ public string BuildRID { get; }
+ public string BuildArchitecture { get; }
+ public string TargetRID { get; }
+ public string MicrosoftNETCoreAppVersion { get; }
+ public string Configuration { get; }
+ public string RepoRoot { get; }
+ public string BaseArtifactsFolder { get; }
+ public string BaseBinFolder { get; }
+ public string BaseObjFolder { get; }
+ public string Artifacts { get; }
+ public string HostArtifacts { get; }
+ public string BuiltDotnet { get; }
+ public string NugetPackages { get; }
+ public string CorehostPackages { get; }
+ public string DotnetSDK { get; }
+
+ private string _testContextVariableFilePath { get; }
+ private ImmutableDictionary _testContextVariables { get; }
+
+ public RepoDirectoriesProvider(
+ string repoRoot = null,
+ string artifacts = null,
+ string builtDotnet = null,
+ string nugetPackages = null,
+ string corehostPackages = null,
+ string dotnetSdk = null,
+ string microsoftNETCoreAppVersion = null)
+ {
+ RepoRoot = repoRoot ?? GetRepoRootDirectory();
+
+ _testContextVariableFilePath = Path.Combine(
+ Directory.GetCurrentDirectory(),
+ "TestContextVariables.txt");
+
+ _testContextVariables = File.ReadAllLines(_testContextVariableFilePath)
+ .ToImmutableDictionary(
+ line => line.Substring(0, line.IndexOf('=')),
+ line => line.Substring(line.IndexOf('=') + 1),
+ StringComparer.OrdinalIgnoreCase);
+
+ BaseArtifactsFolder = artifacts ?? Path.Combine(RepoRoot, "artifacts");
+ BaseBinFolder = artifacts ?? Path.Combine(BaseArtifactsFolder, "bin");
+ BaseObjFolder = artifacts ?? Path.Combine(BaseArtifactsFolder, "obj");
+
+ TargetRID = GetTestContextVariable("TEST_TARGETRID");
+ BuildRID = GetTestContextVariable("BUILDRID");
+ BuildArchitecture = GetTestContextVariable("BUILD_ARCHITECTURE");
+ MicrosoftNETCoreAppVersion = microsoftNETCoreAppVersion ?? GetTestContextVariable("MNA_VERSION");
+
+ Configuration = GetTestContextVariable("BUILD_CONFIGURATION");
+ string osPlatformConfig = $"{BuildRID}.{Configuration}";
+
+ DotnetSDK = dotnetSdk ?? GetTestContextVariable("DOTNET_SDK_PATH");
+
+ if (!Directory.Exists(DotnetSDK))
+ {
+ throw new InvalidOperationException("ERROR: Test SDK folder not found.");
+ }
+
+ Artifacts = Path.Combine(BaseBinFolder, osPlatformConfig);
+ HostArtifacts = artifacts ?? Path.Combine(Artifacts, "corehost");
+
+ NugetPackages = nugetPackages ??
+ GetTestContextVariable("NUGET_PACKAGES") ??
+ Path.Combine(RepoRoot, ".packages");
+
+ CorehostPackages = corehostPackages ?? Path.Combine(Artifacts, "corehost");
+ BuiltDotnet = builtDotnet ?? Path.Combine(BaseObjFolder, osPlatformConfig, "sharedFrameworkPublish");
+ }
+
+ public string GetTestContextVariable(string name)
+ {
+ return GetTestContextVariableOrNull(name) ?? throw new ArgumentException(
+ $"Unable to find variable '{name}' in " +
+ $"test context variable file '{_testContextVariableFilePath}'");
+ }
+
+ public string GetTestContextVariableOrNull(string name)
+ {
+ // Allow env var override, although normally the test context variables file is used.
+ // Don't accept NUGET_PACKAGES env override specifically: Arcade sets this and it leaks
+ // in during build.cmd/sh runs, replacing the test-specific dir.
+ if (!name.Equals("NUGET_PACKAGES", StringComparison.OrdinalIgnoreCase))
+ {
+ if (Environment.GetEnvironmentVariable(name) is string envValue)
+ {
+ return envValue;
+ }
+ }
+
+ if (_testContextVariables.TryGetValue(name, out string value))
+ {
+ return value;
+ }
+
+ return null;
+ }
+
+ private static string GetRepoRootDirectory()
+ {
+ string currentDirectory = Directory.GetCurrentDirectory();
+
+ while (currentDirectory != null)
+ {
+ string gitDirOrFile = Path.Combine(currentDirectory, ".git");
+ if (Directory.Exists(gitDirOrFile) || File.Exists(gitDirOrFile))
+ {
+ break;
+ }
+ currentDirectory = Directory.GetParent(currentDirectory)?.FullName;
+ }
+
+ if (currentDirectory == null)
+ {
+ throw new Exception("Cannot find the git repository root");
+ }
+
+ return currentDirectory;
+ }
+ }
+}
diff --git a/src/windowsdesktop/TestUtils/TestUtils.csproj b/src/windowsdesktop/TestUtils/TestUtils.csproj
new file mode 100644
index 000000000..328948ef0
--- /dev/null
+++ b/src/windowsdesktop/TestUtils/TestUtils.csproj
@@ -0,0 +1,10 @@
+
+
+
+ $(TestInfraTargetFramework)
+ true
+ TestUtils
+ TestUtils
+
+
+
diff --git a/src/windowsdesktop/tests/NuGetArtifactTester.cs b/src/windowsdesktop/tests/NuGetArtifactTester.cs
new file mode 100644
index 000000000..b46b78150
--- /dev/null
+++ b/src/windowsdesktop/tests/NuGetArtifactTester.cs
@@ -0,0 +1,205 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+// See the LICENSE file in the project root for more information.
+
+using Microsoft.DotNet.CoreSetup.Test;
+using NuGet.Packaging;
+using NuGet.Packaging.Core;
+using NuGet.Versioning;
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.IO.Compression;
+using System.Linq;
+using System.Xml.Linq;
+using Xunit;
+
+namespace Microsoft.DotNet.CoreSetup.Packaging.Tests
+{
+ public class NuGetArtifactTester : IDisposable
+ {
+ public static NuGetArtifactTester Open(
+ RepoDirectoriesProvider dirs,
+ string project,
+ string id = null)
+ {
+ var tester = OpenOrNull(dirs, project, id);
+ Assert.NotNull(tester);
+ return tester;
+ }
+
+ public static NuGetArtifactTester OpenOrNull(
+ RepoDirectoriesProvider dirs,
+ string project,
+ string id = null)
+ {
+ id = id ?? project;
+
+ string nuspecPath = Path.Combine(
+ dirs.BaseArtifactsFolder,
+ "packages",
+ dirs.Configuration,
+ "specs",
+ $"{id}.nuspec");
+
+ if (!File.Exists(nuspecPath))
+ {
+ return null;
+ }
+
+ PackageIdentity builtIdentity = new NuspecReader(nuspecPath).GetIdentity();
+
+ string nupkgPath = Path.Combine(
+ dirs.BaseArtifactsFolder,
+ "packages",
+ dirs.Configuration,
+ "Shipping",
+ $"{builtIdentity}.nupkg");
+
+ // If the nuspec exists, the nupkg should exist.
+ Assert.True(File.Exists(nupkgPath));
+
+ return new NuGetArtifactTester(nupkgPath);
+ }
+
+ public PackageIdentity Identity { get; }
+ public NuGetVersion PackageVersion { get; }
+
+ private readonly PackageArchiveReader _reader;
+
+ public NuGetArtifactTester(string file)
+ {
+ _reader = new PackageArchiveReader(ZipFile.Open(file, ZipArchiveMode.Read));
+ Identity = _reader.NuspecReader.GetIdentity();
+ PackageVersion = _reader.NuspecReader.GetVersion();
+ }
+
+ public void Dispose()
+ {
+ _reader.Dispose();
+ }
+
+ public void IsTargetingPack()
+ {
+ IsFrameworkPack();
+
+ Assert.NotEmpty(_reader.GetFiles("ref"));
+ Assert.Empty(_reader.GetFiles("runtimes"));
+ Assert.Empty(_reader.GetFiles("lib"));
+
+ ContainsFrameworkList("FrameworkList.xml");
+ }
+
+ public void IsTargetingPackForPlatform()
+ {
+ IsFrameworkPack();
+
+ HasGoodPlatformManifest();
+ }
+
+ public void IsAppHostPack()
+ {
+ IsRuntimeSpecificPack();
+ }
+
+ public void IsRuntimePack()
+ {
+ IsRuntimeSpecificPack();
+
+ HasOnlyTheseDataFiles(
+ "data/RuntimeList.xml",
+ "data/PlatformManifest.txt");
+
+ HasGoodPlatformManifest();
+
+ ContainsFrameworkList("RuntimeList.xml");
+ }
+
+ public void HasOnlyTheseDataFiles(params string[] expectedDataFiles)
+ {
+ HashSet dataFileSet = _reader.GetFiles("data").ToHashSet();
+
+ Assert.True(
+ dataFileSet.SetEquals(expectedDataFiles),
+ "Invalid set of data files: " +
+ $"expected '{string.Join(", ", expectedDataFiles)}', " +
+ $"actual '{string.Join(", ", dataFileSet)}'");
+ }
+
+ public void HasGoodPlatformManifest()
+ {
+ string platformManifestContent = ReadEntryContent(
+ _reader.GetEntry("data/PlatformManifest.txt"));
+
+ // Sanity: check if the manifest has some content.
+ Assert.Contains(".dll", platformManifestContent);
+
+ // Check that the lines contain the package ID where they're supposed to.
+ foreach (var parts in platformManifestContent
+ .Split('\r', '\n')
+ .Select(line => line.Split("|"))
+ .Where(parts => parts.Length > 1))
+ {
+ Assert.True(
+ parts[1] == Identity.Id,
+ $"Platform manifest package id column '{parts[1]}' doesn't match " +
+ $"actual package id '{Identity.Id}'");
+ }
+ }
+
+ public string ReadEntryContent(string entry)
+ {
+ return ReadEntryContent(_reader.GetEntry(entry));
+ }
+
+ public XDocument ReadEntryXDocument(string entry)
+ {
+ return ReadEntryXDocument(_reader.GetEntry(entry));
+ }
+
+ private void IsFrameworkPack()
+ {
+ Assert.Empty(_reader.GetPackageDependencies());
+
+ var expectedTypes = new[] { new PackageType("DotnetPlatform", new Version(0, 0)) };
+ var types = _reader.GetPackageTypes().ToArray();
+ Assert.Equal(expectedTypes, types);
+ }
+
+ private void IsRuntimeSpecificPack()
+ {
+ IsFrameworkPack();
+
+ Assert.Empty(_reader.GetFiles("ref"));
+ Assert.NotEmpty(_reader.GetFiles("runtimes"));
+ Assert.Empty(_reader.GetFiles("lib"));
+ }
+
+ private void ContainsFrameworkList(string name)
+ {
+ XDocument frameworkList = ReadEntryXDocument(
+ _reader.GetEntry($"data/{name}"));
+
+ XElement[] frameworkListFiles = frameworkList
+ .Element("FileList")
+ .Elements("File")
+ .ToArray();
+
+ // Sanity: check if the list has some content.
+ Assert.NotEmpty(frameworkListFiles);
+ }
+
+ private static string ReadEntryContent(ZipArchiveEntry entry)
+ {
+ using (var reader = new StreamReader(entry.Open()))
+ {
+ return reader.ReadToEnd();
+ }
+ }
+
+ private static XDocument ReadEntryXDocument(ZipArchiveEntry entry)
+ {
+ return XDocument.Parse(ReadEntryContent(entry));
+ }
+ }
+}
diff --git a/src/windowsdesktop/tests/WindowsDesktopNupkgTests.cs b/src/windowsdesktop/tests/WindowsDesktopNupkgTests.cs
new file mode 100644
index 000000000..dcf5fb973
--- /dev/null
+++ b/src/windowsdesktop/tests/WindowsDesktopNupkgTests.cs
@@ -0,0 +1,96 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+// See the LICENSE file in the project root for more information.
+
+using Microsoft.DotNet.CoreSetup.Test;
+using System;
+using System.Linq;
+using System.Xml.Linq;
+using Xunit;
+
+namespace Microsoft.DotNet.CoreSetup.Packaging.Tests
+{
+ public class WindowsDesktopNupkgTests
+ {
+ private readonly RepoDirectoriesProvider dirs = new RepoDirectoriesProvider();
+
+ [Fact]
+ public void WindowsDesktopTargetingPackIsValid()
+ {
+ // Use "OrNull" variant to get null if this nupkg doesn't exist. WindowsDesktop is only
+ // built on officially supported platforms.
+ using (var tester = NuGetArtifactTester.OpenOrNull(
+ dirs,
+ "Microsoft.WindowsDesktop.App.Ref"))
+ {
+ if (CurrentRidShouldCreateNupkg)
+ {
+ Assert.NotNull(tester);
+
+ tester.IsTargetingPackForPlatform();
+ tester.HasOnlyTheseDataFiles(
+ "data/FrameworkList.xml",
+ "data/PlatformManifest.txt");
+ }
+ else
+ {
+ Assert.Null(tester);
+ }
+ }
+ }
+
+ [Fact]
+ public void WindowsDesktopFrameworkListHasClassifications()
+ {
+ using (var tester = NuGetArtifactTester.OpenOrNull(
+ dirs,
+ "Microsoft.WindowsDesktop.App.Ref"))
+ {
+ // Let other test case handle if this is OK.
+ if (tester == null)
+ {
+ return;
+ }
+
+ XDocument fxList = tester.ReadEntryXDocument("data/FrameworkList.xml");
+ var files = fxList.Element("FileList").Elements("File").ToArray();
+
+ // Sanity check: did any elements we expect make it into the final file?
+ foreach (var attributeName in new[] { "Profile" })
+ {
+ Assert.True(
+ files.Any(x => !string.IsNullOrEmpty(x.Attribute(attributeName)?.Value)),
+ $"Can't find a non-empty '{attributeName}' attribute in framework list.");
+ }
+ }
+ }
+
+ [Fact]
+ public void WindowsDesktopRuntimePackIsValid()
+ {
+ using (var tester = NuGetArtifactTester.OpenOrNull(
+ dirs,
+ "Microsoft.WindowsDesktop.App.Runtime",
+ $"Microsoft.WindowsDesktop.App.Runtime.{dirs.BuildRID}"))
+ {
+ if (CurrentRidShouldCreateNupkg)
+ {
+ Assert.NotNull(tester);
+
+ tester.IsRuntimePack();
+ }
+ else
+ {
+ Assert.Null(tester);
+ }
+ }
+ }
+
+ private bool CurrentRidShouldCreateNupkg =>
+ new[]
+ {
+ "win-x64",
+ "win-x86"
+ }.Contains(dirs.BuildRID);
+ }
+}
diff --git a/src/windowsdesktop/tests/windowsdesktop.Tests.csproj b/src/windowsdesktop/tests/windowsdesktop.Tests.csproj
new file mode 100644
index 000000000..45cb5d57b
--- /dev/null
+++ b/src/windowsdesktop/tests/windowsdesktop.Tests.csproj
@@ -0,0 +1,37 @@
+
+
+
+ $(TestInfraTargetFramework)
+
+ false
+
+
+
+ 7.1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tools-local/Directory.Build.props b/tools-local/Directory.Build.props
new file mode 100644
index 000000000..cd6ba441d
--- /dev/null
+++ b/tools-local/Directory.Build.props
@@ -0,0 +1,9 @@
+
+
+
+
+
+ true
+
+
+
\ No newline at end of file
diff --git a/tools-local/regenerate-readme-table.proj b/tools-local/regenerate-readme-table.proj
new file mode 100644
index 000000000..c7e36ebf8
--- /dev/null
+++ b/tools-local/regenerate-readme-table.proj
@@ -0,0 +1,27 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tools-local/tasks/BuildTask.cs b/tools-local/tasks/BuildTask.cs
new file mode 100644
index 000000000..979a99edf
--- /dev/null
+++ b/tools-local/tasks/BuildTask.cs
@@ -0,0 +1,37 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+// See the LICENSE file in the project root for more information.
+
+using Microsoft.Build.Framework;
+using Microsoft.Build.Utilities;
+
+namespace Microsoft.DotNet.Build.Tasks
+{
+ public abstract partial class BuildTask : ITask
+ {
+ private TaskLoggingHelper _log = null;
+
+ internal TaskLoggingHelper Log
+ {
+ get { return _log ?? (_log = new TaskLoggingHelper(this)); }
+ }
+
+ public BuildTask()
+ {
+ }
+
+ public IBuildEngine BuildEngine
+ {
+ get;
+ set;
+ }
+
+ public ITaskHost HostObject
+ {
+ get;
+ set;
+ }
+
+ public abstract bool Execute();
+ }
+}
diff --git a/tools-local/tasks/BuildTools.Publish/CloudTestTasks/AzureBlobLease.cs b/tools-local/tasks/BuildTools.Publish/CloudTestTasks/AzureBlobLease.cs
new file mode 100644
index 000000000..d604988c8
--- /dev/null
+++ b/tools-local/tasks/BuildTools.Publish/CloudTestTasks/AzureBlobLease.cs
@@ -0,0 +1,183 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+// See the LICENSE file in the project root for more information.
+
+using Microsoft.Build.Framework;
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.Linq;
+using System.Net.Http;
+using System.Threading;
+using System.Threading.Tasks;
+
+namespace Microsoft.DotNet.Build.CloudTestTasks
+{
+ public partial class AzureBlobLease
+ {
+ private string _containerName;
+ private string _blobName;
+ private TimeSpan _maxWait;
+ private TimeSpan _delay;
+ private const int s_MaxWaitDefault = 60; // seconds
+ private const int s_DelayDefault = 500; // milliseconds
+ private CancellationTokenSource _cancellationTokenSource;
+ private Task _leaseRenewalTask;
+ private string _connectionString;
+ private string _accountName;
+ private string _accountKey;
+ private Microsoft.Build.Utilities.TaskLoggingHelper _log;
+ private string _leaseId;
+ private string _leaseUrl;
+
+ public AzureBlobLease(string accountName, string accountKey, string connectionString, string containerName, string blobName, Microsoft.Build.Utilities.TaskLoggingHelper log, string maxWait = null, string delay = null)
+ {
+ _accountName = accountName;
+ _accountKey = accountKey;
+ _connectionString = connectionString;
+ _containerName = containerName;
+ _blobName = blobName;
+ _maxWait = !string.IsNullOrWhiteSpace(maxWait) ? TimeSpan.Parse(maxWait) : TimeSpan.FromSeconds(s_MaxWaitDefault);
+ _delay = !string.IsNullOrWhiteSpace(delay) ? TimeSpan.Parse(delay) : TimeSpan.FromMilliseconds(s_DelayDefault);
+ _log = log;
+ _leaseUrl = $"{AzureHelper.GetBlobRestUrl(_accountName, _containerName, _blobName)}?comp=lease";
+ }
+
+ public string Acquire()
+ {
+ Stopwatch stopWatch = new Stopwatch();
+ stopWatch.Start();
+
+ while (stopWatch.ElapsedMilliseconds < _maxWait.TotalMilliseconds)
+ {
+ try
+ {
+ string leaseId = AcquireLeaseOnBlobAsync().GetAwaiter().GetResult();
+ _cancellationTokenSource = new CancellationTokenSource();
+ _leaseRenewalTask = Task.Run(() =>
+ { AutoRenewLeaseOnBlob(this, _log); },
+ _cancellationTokenSource.Token);
+ _leaseId = leaseId;
+ return _leaseId;
+ }
+ catch (Exception e)
+ {
+ _log.LogMessage($"Retrying lease acquisition on {_blobName}, {e.Message}");
+ Thread.Sleep(_delay);
+ }
+ }
+ ResetLeaseRenewalTaskState();
+ throw new Exception($"Unable to acquire lease on {_blobName}");
+
+ }
+
+ public void Release()
+ {
+ // Cancel the lease renewal task since we are about to release the lease.
+ ResetLeaseRenewalTaskState();
+
+ using (HttpClient client = new HttpClient())
+ {
+ Tuple<string, string> leaseAction = new Tuple<string, string>("x-ms-lease-action", "release");
+ Tuple<string, string> headerLeaseId = new Tuple<string, string>("x-ms-lease-id", _leaseId);
+ List<Tuple<string, string>> additionalHeaders = new List<Tuple<string, string>>() { leaseAction, headerLeaseId };
+ var request = AzureHelper.RequestMessage("PUT", _leaseUrl, _accountName, _accountKey, additionalHeaders);
+ using (HttpResponseMessage response = AzureHelper.RequestWithRetry(_log, client, request).GetAwaiter().GetResult())
+ {
+ if (!response.IsSuccessStatusCode)
+ {
+ _log.LogMessage($"Unable to release lease on container/blob {_containerName}/{_blobName}.");
+ }
+ }
+ }
+ }
+
+ private async Task<string> AcquireLeaseOnBlobAsync()
+ {
+ _log.LogMessage(MessageImportance.Low, $"Requesting lease for container/blob '{_containerName}/{_blobName}'.");
+ string leaseId = string.Empty;
+ using (HttpClient client = new HttpClient())
+ {
+ try
+ {
+ Tuple<string, string> leaseAction = new Tuple<string, string>("x-ms-lease-action", "acquire");
+ Tuple<string, string> leaseDuration = new Tuple<string, string>("x-ms-lease-duration", "60" /* seconds */);
+ List<Tuple<string, string>> additionalHeaders = new List<Tuple<string, string>>() { leaseAction, leaseDuration };
+ var request = AzureHelper.RequestMessage("PUT", _leaseUrl, _accountName, _accountKey, additionalHeaders);
+ using (HttpResponseMessage response = await AzureHelper.RequestWithRetry(_log, client, request))
+ {
+ leaseId = response.Headers.GetValues("x-ms-lease-id").FirstOrDefault();
+ }
+ }
+ catch (Exception e)
+ {
+ _log.LogErrorFromException(e, true);
+ }
+ }
+
+ return leaseId;
+ }
+ private static void AutoRenewLeaseOnBlob(AzureBlobLease instance, Microsoft.Build.Utilities.TaskLoggingHelper log)
+ {
+ TimeSpan maxWait = TimeSpan.FromSeconds(s_MaxWaitDefault);
+ TimeSpan delay = TimeSpan.FromMilliseconds(s_DelayDefault);
+ TimeSpan waitFor = maxWait;
+ CancellationToken token = instance._cancellationTokenSource.Token;
+
+ while (true)
+ {
+ token.ThrowIfCancellationRequested();
+
+ try
+ {
+ log.LogMessage(MessageImportance.Low, $"Requesting lease for container/blob '{instance._containerName}/{instance._blobName}'.");
+ using (HttpClient client = new HttpClient())
+ {
+ Tuple<string, string> leaseAction = new Tuple<string, string>("x-ms-lease-action", "renew");
+ Tuple<string, string> headerLeaseId = new Tuple<string, string>("x-ms-lease-id", instance._leaseId);
+ List<Tuple<string, string>> additionalHeaders = new List<Tuple<string, string>>() { leaseAction, headerLeaseId };
+ var request = AzureHelper.RequestMessage("PUT", instance._leaseUrl, instance._accountName, instance._accountKey, additionalHeaders);
+ using (HttpResponseMessage response = AzureHelper.RequestWithRetry(log, client, request).GetAwaiter().GetResult())
+ {
+ if (!response.IsSuccessStatusCode)
+ {
+ throw new Exception("Unable to acquire lease.");
+ }
+ }
+ }
+ waitFor = maxWait;
+ }
+ catch (Exception e)
+ {
+ Console.WriteLine($"Rerying lease renewal on {instance._containerName}, {e.Message}");
+ waitFor = delay;
+ }
+ token.ThrowIfCancellationRequested();
+
+ Task.Delay(waitFor, token).Wait();
+ }
+ }
+
+ private void ResetLeaseRenewalTaskState()
+ {
+ // Cancel the lease renewal task if it was created
+ if (_leaseRenewalTask != null)
+ {
+ _cancellationTokenSource.Cancel();
+
+ // Block until the task ends. It can throw if we cancelled it before it completed.
+ try
+ {
+ _leaseRenewalTask.Wait();
+ }
+ catch (Exception)
+ {
+ // Ignore the caught exception as it will be expected.
+ }
+
+ _leaseRenewalTask = null;
+ }
+ }
+
+ }
+}
diff --git a/tools-local/tasks/BuildTools.Publish/CloudTestTasks/AzureConnectionStringBuildTask.cs b/tools-local/tasks/BuildTools.Publish/CloudTestTasks/AzureConnectionStringBuildTask.cs
new file mode 100644
index 000000000..d309f9235
--- /dev/null
+++ b/tools-local/tasks/BuildTools.Publish/CloudTestTasks/AzureConnectionStringBuildTask.cs
@@ -0,0 +1,61 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+// See the LICENSE file in the project root for more information.
+
+using Microsoft.Build.Utilities;
+using System.Text.RegularExpressions;
+
+namespace Microsoft.DotNet.Build.CloudTestTasks
+{
+ public abstract class AzureConnectionStringBuildTask : Task
+ {
+ /// <summary>
+ /// Azure Storage account connection string. Supersedes Account Key / Name.
+ /// Will cause errors if both are set.
+ /// </summary>
+ public string ConnectionString { get; set; }
+
+ /// <summary>
+ /// The Azure account key used when creating the connection string.
+ /// When we fully deprecate these, can just make them get; only.
+ /// </summary>
+ public string AccountKey { get; set; }
+
+ /// <summary>
+ /// The Azure account name used when creating the connection string.
+ /// When we fully deprecate these, can just make them get; only.
+ /// </summary>
+ public string AccountName { get; set; }
+
+ public void ParseConnectionString()
+ {
+ if (!string.IsNullOrEmpty(ConnectionString))
+ {
+ if (!(string.IsNullOrEmpty(AccountKey) && string.IsNullOrEmpty(AccountName)))
+ {
+ Log.LogError("If the ConnectionString property is set, you must not provide AccountKey / AccountName. These values will be deprecated in the future.");
+ }
+ else
+ {
+ Regex storageConnectionStringRegex = new Regex("AccountName=(?<name>.+?);AccountKey=(?<key>.+?);");
+
+ MatchCollection matches = storageConnectionStringRegex.Matches(ConnectionString);
+ if (matches.Count > 0)
+ {
+ // When we deprecate this format, we'll want to demote these to private
+ AccountName = matches[0].Groups["name"].Value;
+ AccountKey = matches[0].Groups["key"].Value;
+ }
+ else
+ {
+ Log.LogError("Error parsing connection string. Please review its value.");
+ }
+ }
+ }
+ else if (string.IsNullOrEmpty(AccountKey) || string.IsNullOrEmpty(AccountName))
+ {
+ Log.LogError("Error, must provide either ConnectionString or AccountName with AccountKey");
+ }
+ }
+ }
+}
diff --git a/tools-local/tasks/BuildTools.Publish/CloudTestTasks/AzureHelper.cs b/tools-local/tasks/BuildTools.Publish/CloudTestTasks/AzureHelper.cs
new file mode 100644
index 000000000..6b9e8524b
--- /dev/null
+++ b/tools-local/tasks/BuildTools.Publish/CloudTestTasks/AzureHelper.cs
@@ -0,0 +1,461 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+// See the LICENSE file in the project root for more information.
+
+using Microsoft.Build.Framework;
+using Microsoft.Build.Utilities;
+using System;
+using System.Collections.Generic;
+using System.Globalization;
+using System.Linq;
+using System.Net;
+using System.Net.Http;
+using System.Net.Http.Headers;
+using System.Security.Cryptography;
+using System.Text;
+using System.Text.RegularExpressions;
+using System.Threading.Tasks;
+
+namespace Microsoft.DotNet.Build.CloudTestTasks
+{
+ public static class AzureHelper
+ {
+ /// <summary>
+ /// The storage api version.
+ /// </summary>
+ public static readonly string StorageApiVersion = "2015-04-05";
+ public const string DateHeaderString = "x-ms-date";
+ public const string VersionHeaderString = "x-ms-version";
+ public const string AuthorizationHeaderString = "Authorization";
+ public const string CacheControlString = "x-ms-blob-cache-control";
+ public const string ContentTypeString = "x-ms-blob-content-type";
+
+ public enum SasAccessType
+ {
+ Read,
+ Write,
+ };
+
+ public static string AuthorizationHeader(
+ string storageAccount,
+ string storageKey,
+ string method,
+ DateTime now,
+ HttpRequestMessage request,
+ string ifMatch = "",
+ string contentMD5 = "",
+ string size = "",
+ string contentType = "")
+ {
+ string stringToSign = string.Format(
+ "{0}\n\n\n{1}\n{5}\n{6}\n\n\n{2}\n\n\n\n{3}{4}",
+ method,
+ (size == string.Empty) ? string.Empty : size,
+ ifMatch,
+ GetCanonicalizedHeaders(request),
+ GetCanonicalizedResource(request.RequestUri, storageAccount),
+ contentMD5,
+ contentType);
+ byte[] signatureBytes = Encoding.UTF8.GetBytes(stringToSign);
+ string authorizationHeader;
+ using (HMACSHA256 hmacsha256 = new HMACSHA256(Convert.FromBase64String(storageKey)))
+ {
+ authorizationHeader = "SharedKey " + storageAccount + ":"
+ + Convert.ToBase64String(hmacsha256.ComputeHash(signatureBytes));
+ }
+
+ return authorizationHeader;
+ }
+
+ public static string CreateContainerSasToken(
+ string accountName,
+ string containerName,
+ string key,
+ SasAccessType accessType,
+ int validityTimeInDays)
+ {
+ string signedPermissions = string.Empty;
+ switch (accessType)
+ {
+ case SasAccessType.Read:
+ signedPermissions = "r";
+ break;
+ case SasAccessType.Write:
+ signedPermissions = "wdl";
+ break;
+ default:
+ throw new ArgumentOutOfRangeException(nameof(accessType), accessType, "Unrecognized value");
+ }
+
+ string signedStart = DateTime.UtcNow.ToString("O");
+ string signedExpiry = DateTime.UtcNow.AddDays(validityTimeInDays).ToString("O");
+ string canonicalizedResource = "/blob/" + accountName + "/" + containerName;
+ string signedIdentifier = string.Empty;
+ string signedVersion = StorageApiVersion;
+
+ string stringToSign = ConstructServiceStringToSign(
+ signedPermissions,
+ signedVersion,
+ signedExpiry,
+ canonicalizedResource,
+ signedIdentifier,
+ signedStart);
+
+ byte[] signatureBytes = Encoding.UTF8.GetBytes(stringToSign);
+ string signature;
+ using (HMACSHA256 hmacSha256 = new HMACSHA256(Convert.FromBase64String(key)))
+ {
+ signature = Convert.ToBase64String(hmacSha256.ComputeHash(signatureBytes));
+ }
+
+ string sasToken = string.Format(
+ "?sv={0}&sr={1}&sig={2}&st={3}&se={4}&sp={5}",
+ WebUtility.UrlEncode(signedVersion),
+ WebUtility.UrlEncode("c"),
+ WebUtility.UrlEncode(signature),
+ WebUtility.UrlEncode(signedStart),
+ WebUtility.UrlEncode(signedExpiry),
+ WebUtility.UrlEncode(signedPermissions));
+
+ return sasToken;
+ }
+
+ public static string GetCanonicalizedHeaders(HttpRequestMessage request)
+ {
+ StringBuilder sb = new StringBuilder();
+ List<string> headerNameList = (from headerName in request.Headers
+ where
+ headerName.Key.ToLowerInvariant()
+ .StartsWith("x-ms-", StringComparison.Ordinal)
+ select headerName.Key.ToLowerInvariant()).ToList();
+ headerNameList.Sort();
+ foreach (string headerName in headerNameList)
+ {
+ StringBuilder builder = new StringBuilder(headerName);
+ string separator = ":";
+ foreach (string headerValue in GetHeaderValues(request.Headers, headerName))
+ {
+ string trimmedValue = headerValue.Replace("\r\n", string.Empty);
+ builder.Append(separator);
+ builder.Append(trimmedValue);
+ separator = ",";
+ }
+
+ sb.Append(builder);
+ sb.Append("\n");
+ }
+
+ return sb.ToString();
+ }
+
+ public static string GetCanonicalizedResource(Uri address, string accountName)
+ {
+ StringBuilder str = new StringBuilder();
+ StringBuilder builder = new StringBuilder("/");
+ builder.Append(accountName);
+ builder.Append(address.AbsolutePath);
+ str.Append(builder);
+ Dictionary<string, HashSet<string>> queryKeyValues = ExtractQueryKeyValues(address);
+ Dictionary<string, HashSet<string>> dictionary = GetCommaSeparatedList(queryKeyValues);
+
+ foreach (KeyValuePair<string, HashSet<string>> pair in dictionary.OrderBy(p => p.Key))
+ {
+ StringBuilder stringBuilder = new StringBuilder(string.Empty);
+ stringBuilder.Append(pair.Key + ":");
+ string commaList = string.Join(",", pair.Value);
+ stringBuilder.Append(commaList);
+ str.Append("\n");
+ str.Append(stringBuilder);
+ }
+
+ return str.ToString();
+ }
+
+ public static List<string> GetHeaderValues(HttpRequestHeaders headers, string headerName)
+ {
+ List<string> list = new List<string>();
+ IEnumerable<string> values;
+ headers.TryGetValues(headerName, out values);
+ if (values != null)
+ {
+ list.Add((values.FirstOrDefault() ?? string.Empty).TrimStart(null));
+ }
+
+ return list;
+ }
+
+ private static bool IsWithinRetryRange(HttpStatusCode statusCode)
+ {
+ // Retry on http client and server error codes (4xx - 5xx) as well as redirect
+
+ var rawStatus = (int)statusCode;
+ if (rawStatus == 302)
+ return true;
+ else if (rawStatus >= 400 && rawStatus <= 599)
+ return true;
+ else
+ return false;
+ }
+
+ public static async Task<HttpResponseMessage> RequestWithRetry(TaskLoggingHelper loggingHelper, HttpClient client,
+ Func<HttpRequestMessage> createRequest, Func<HttpResponseMessage, bool> validationCallback = null, int retryCount = 5,
+ int retryDelaySeconds = 5)
+ {
+ if (loggingHelper == null)
+ throw new ArgumentNullException(nameof(loggingHelper));
+ if (client == null)
+ throw new ArgumentNullException(nameof(client));
+ if (createRequest == null)
+ throw new ArgumentNullException(nameof(createRequest));
+ if (retryCount < 1)
+ throw new ArgumentException(nameof(retryCount));
+ if (retryDelaySeconds < 1)
+ throw new ArgumentException(nameof(retryDelaySeconds));
+
+ int retries = 0;
+ HttpResponseMessage response = null;
+
+ // add a bit of randomness to the retry delay
+ var rng = new Random();
+
+ while (retries < retryCount)
+ {
+ if (retries > 0)
+ {
+ if (response != null)
+ {
+ response.Dispose();
+ response = null;
+ }
+
+ int delay = retryDelaySeconds * retries * rng.Next(1, 5);
+ loggingHelper.LogMessage(MessageImportance.Low, "Waiting {0} seconds before retry", delay);
+ await System.Threading.Tasks.Task.Delay(delay * 1000);
+ }
+
+ try
+ {
+ using (var request = createRequest())
+ response = await client.SendAsync(request);
+ }
+ catch (Exception e)
+ {
+ loggingHelper.LogWarningFromException(e, true);
+
+ // if this is the final iteration let the exception bubble up
+ if (retries + 1 == retryCount)
+ throw;
+ }
+
+ // response can be null if we fail to send the request
+ if (response != null)
+ {
+ if (validationCallback == null)
+ {
+ // check if the response code is within the range of failures
+ if (!IsWithinRetryRange(response.StatusCode))
+ {
+ return response;
+ }
+ }
+ else
+ {
+ bool isSuccess = validationCallback(response);
+ if (!isSuccess)
+ {
+ loggingHelper.LogMessage("Validation callback returned retry for status code {0}", response.StatusCode);
+ }
+ else
+ {
+ loggingHelper.LogMessage("Validation callback returned success for status code {0}", response.StatusCode);
+ return response;
+ }
+ }
+ }
+
+ ++retries;
+ }
+
+ // retry count exceeded
+ loggingHelper.LogWarning("Retry count {0} exceeded", retryCount);
+
+ // set some default values in case response is null
+ var statusCode = "None";
+ var contentStr = "Null";
+ if (response != null)
+ {
+ statusCode = response.StatusCode.ToString();
+ contentStr = await response.Content.ReadAsStringAsync();
+ response.Dispose();
+ }
+
+ throw new HttpRequestException($"Request {createRequest().RequestUri} failed with status {statusCode}. Response : {contentStr}");
+ }
+
+ private static string ConstructServiceStringToSign(
+ string signedPermissions,
+ string signedVersion,
+ string signedExpiry,
+ string canonicalizedResource,
+ string signedIdentifier,
+ string signedStart,
+ string signedIP = "",
+ string signedProtocol = "",
+ string rscc = "",
+ string rscd = "",
+ string rsce = "",
+ string rscl = "",
+ string rsct = "")
+ {
+ // constructing string to sign based on spec in https://msdn.microsoft.com/en-us/library/azure/dn140255.aspx
+ var stringToSign = string.Join(
+ "\n",
+ signedPermissions,
+ signedStart,
+ signedExpiry,
+ canonicalizedResource,
+ signedIdentifier,
+ signedIP,
+ signedProtocol,
+ signedVersion,
+ rscc,
+ rscd,
+ rsce,
+ rscl,
+ rsct);
+ return stringToSign;
+ }
+
+ private static Dictionary<string, HashSet<string>> ExtractQueryKeyValues(Uri address)
+ {
+ Dictionary<string, HashSet<string>> values = new Dictionary<string, HashSet<string>>();
+ //Decode this to allow the regex to pull out the correct groups for signing
+ address = new Uri(WebUtility.UrlDecode(address.ToString()));
+ Regex newreg = new Regex(@"(?:\?|&)([^=]+)=([^&]+)");
+ MatchCollection matches = newreg.Matches(address.Query);
+ foreach (Match match in matches)
+ {
+ string key, value;
+ if (!string.IsNullOrEmpty(match.Groups[1].Value))
+ {
+ key = match.Groups[1].Value;
+ value = match.Groups[2].Value;
+ }
+ else
+ {
+ key = match.Groups[3].Value;
+ value = match.Groups[4].Value;
+ }
+
+ HashSet<string> setOfValues;
+ if (values.TryGetValue(key, out setOfValues))
+ {
+ setOfValues.Add(value);
+ }
+ else
+ {
+ HashSet<string> newSet = new HashSet<string> { value };
+ values.Add(key, newSet);
+ }
+ }
+
+ return values;
+ }
+
+ private static Dictionary<string, HashSet<string>> GetCommaSeparatedList(
+ Dictionary<string, HashSet<string>> queryKeyValues)
+ {
+ Dictionary<string, HashSet<string>> dictionary = new Dictionary<string, HashSet<string>>();
+
+ foreach (string queryKeys in queryKeyValues.Keys)
+ {
+ HashSet<string> setOfValues;
+ queryKeyValues.TryGetValue(queryKeys, out setOfValues);
+ List<string> list = new List<string>();
+ list.AddRange(setOfValues);
+ list.Sort();
+ string commaSeparatedValues = string.Join(",", list);
+ string key = queryKeys.ToLowerInvariant();
+ HashSet<string> setOfValues2;
+ if (dictionary.TryGetValue(key, out setOfValues2))
+ {
+ setOfValues2.Add(commaSeparatedValues);
+ }
+ else
+ {
+ HashSet<string> newSet = new HashSet<string> { commaSeparatedValues };
+ dictionary.Add(key, newSet);
+ }
+ }
+
+ return dictionary;
+ }
+
+ public static Func<HttpRequestMessage> RequestMessage(string method, string url, string accountName, string accountKey, List<Tuple<string, string>> additionalHeaders = null, string body = null)
+ {
+ Func<HttpRequestMessage> requestFunc = () =>
+ {
+ HttpMethod httpMethod = HttpMethod.Get;
+ if (method == "PUT")
+ {
+ httpMethod = HttpMethod.Put;
+ }
+ else if (method == "DELETE")
+ {
+ httpMethod = HttpMethod.Delete;
+ }
+ DateTime dateTime = DateTime.UtcNow;
+ var request = new HttpRequestMessage(httpMethod, url);
+ request.Headers.Add(AzureHelper.DateHeaderString, dateTime.ToString("R", CultureInfo.InvariantCulture));
+ request.Headers.Add(AzureHelper.VersionHeaderString, AzureHelper.StorageApiVersion);
+ if (additionalHeaders != null)
+ {
+ foreach (Tuple<string, string> additionalHeader in additionalHeaders)
+ {
+ request.Headers.Add(additionalHeader.Item1, additionalHeader.Item2);
+ }
+ }
+ if (body != null)
+ {
+ request.Content = new StringContent(body);
+ request.Headers.Add(AzureHelper.AuthorizationHeaderString, AzureHelper.AuthorizationHeader(
+ accountName,
+ accountKey,
+ method,
+ dateTime,
+ request,
+ "",
+ "",
+ request.Content.Headers.ContentLength.ToString(),
+ request.Content.Headers.ContentType.ToString()));
+ }
+ else
+ {
+ request.Headers.Add(AzureHelper.AuthorizationHeaderString, AzureHelper.AuthorizationHeader(
+ accountName,
+ accountKey,
+ method,
+ dateTime,
+ request));
+ }
+ return request;
+ };
+ return requestFunc;
+ }
+
+ public static string GetRootRestUrl(string accountName)
+ {
+ return $"https://{accountName}.blob.core.windows.net";
+ }
+
+ public static string GetContainerRestUrl(string accountName, string containerName)
+ {
+ return $"{GetRootRestUrl(accountName)}/{containerName}";
+ }
+
+ public static string GetBlobRestUrl(string accountName, string containerName, string blob)
+ {
+ return $"{GetContainerRestUrl(accountName, containerName)}/{blob}";
+ }
+ }
+}
diff --git a/tools-local/tasks/BuildTools.Publish/CloudTestTasks/CopyAzureBlobToBlob.cs b/tools-local/tasks/BuildTools.Publish/CloudTestTasks/CopyAzureBlobToBlob.cs
new file mode 100644
index 000000000..2227bfc00
--- /dev/null
+++ b/tools-local/tasks/BuildTools.Publish/CloudTestTasks/CopyAzureBlobToBlob.cs
@@ -0,0 +1,104 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+// See the LICENSE file in the project root for more information.
+
+using Microsoft.Build.Framework;
+using System;
+using System.Collections.Generic;
+using System.Net.Http;
+using System.Threading.Tasks;
+
+namespace Microsoft.DotNet.Build.CloudTestTasks
+{
+ public partial class CopyAzureBlobToBlob : AzureConnectionStringBuildTask
+ {
+ [Required]
+ public string ContainerName { get; set; }
+ [Required]
+ public string SourceBlobName { get; set; }
+ [Required]
+ public string DestinationBlobName { get; set; }
+
+ public override bool Execute()
+ {
+ return ExecuteAsync().GetAwaiter().GetResult();
+ }
+
+ public async Task<bool> ExecuteAsync()
+ {
+ ParseConnectionString();
+ if (Log.HasLoggedErrors)
+ {
+ return false;
+ }
+
+ string sourceUrl = AzureHelper.GetBlobRestUrl(AccountName, ContainerName, SourceBlobName);
+ string destinationUrl = AzureHelper.GetBlobRestUrl(AccountName, ContainerName, DestinationBlobName);
+ using (HttpClient client = new HttpClient())
+ {
+ try
+ {
+ Tuple<string, string> leaseAction = new Tuple<string, string>("x-ms-lease-action", "acquire");
+ Tuple<string, string> leaseDuration = new Tuple<string, string>("x-ms-lease-duration", "60" /* seconds */);
+ Tuple<string, string> headerSource = new Tuple<string, string>("x-ms-copy-source", sourceUrl);
+ List<Tuple<string, string>> additionalHeaders = new List<Tuple<string, string>>() { leaseAction, leaseDuration, headerSource };
+ var request = AzureHelper.RequestMessage("PUT", destinationUrl, AccountName, AccountKey, additionalHeaders);
+ using (HttpResponseMessage response = await AzureHelper.RequestWithRetry(Log, client, request))
+ {
+ if (response.IsSuccessStatusCode)
+ {
+ return true;
+ }
+ }
+ }
+ catch (Exception e)
+ {
+ Log.LogErrorFromException(e, true);
+ }
+ }
+ return false;
+ }
+ public static bool Execute(string accountName,
+ string accountKey,
+ string connectionString,
+ string containerName,
+ string sourceBlobName,
+ string destinationBlobName,
+ IBuildEngine buildengine,
+ ITaskHost taskHost)
+ {
+ CopyAzureBlobToBlob copyAzureBlobToBlob = new CopyAzureBlobToBlob()
+ {
+ AccountName = accountName,
+ AccountKey = accountKey,
+ ContainerName = containerName,
+ SourceBlobName = sourceBlobName,
+ DestinationBlobName = destinationBlobName,
+ BuildEngine = buildengine,
+ HostObject = taskHost
+ };
+ return copyAzureBlobToBlob.Execute();
+ }
+ public static Task<bool> ExecuteAsync(string accountName,
+ string accountKey,
+ string connectionString,
+ string containerName,
+ string sourceBlobName,
+ string destinationBlobName,
+ IBuildEngine buildengine,
+ ITaskHost taskHost)
+ {
+ CopyAzureBlobToBlob copyAzureBlobToBlob = new CopyAzureBlobToBlob()
+ {
+ AccountName = accountName,
+ AccountKey = accountKey,
+ ContainerName = containerName,
+ SourceBlobName = sourceBlobName,
+ DestinationBlobName = destinationBlobName,
+ BuildEngine = buildengine,
+ HostObject = taskHost
+ };
+ return copyAzureBlobToBlob.ExecuteAsync();
+ }
+ }
+}
diff --git a/tools-local/tasks/BuildTools.Publish/CloudTestTasks/DeleteAzureBlob.cs b/tools-local/tasks/BuildTools.Publish/CloudTestTasks/DeleteAzureBlob.cs
new file mode 100644
index 000000000..4d8efa029
--- /dev/null
+++ b/tools-local/tasks/BuildTools.Publish/CloudTestTasks/DeleteAzureBlob.cs
@@ -0,0 +1,70 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+// See the LICENSE file in the project root for more information.
+
+using Microsoft.Build.Framework;
+using System;
+using System.Collections.Generic;
+using System.Net.Http;
+
+namespace Microsoft.DotNet.Build.CloudTestTasks
+{
+ public partial class DeleteAzureBlob: AzureConnectionStringBuildTask
+ {
+ [Required]
+ public string ContainerName { get; set; }
+ [Required]
+ public string BlobName { get; set; }
+
+ public override bool Execute()
+ {
+ ParseConnectionString();
+ if (Log.HasLoggedErrors)
+ {
+ return false;
+ }
+
+ string deleteUrl = $"https://{AccountName}.blob.core.windows.net/{ContainerName}/{BlobName}";
+
+ using (HttpClient client = new HttpClient())
+ {
+ try
+ {
+ Tuple<string, string> snapshots = new Tuple<string, string>("x-ms-lease-delete-snapshots", "include");
+ List<Tuple<string, string>> additionalHeaders = new List<Tuple<string, string>>() { snapshots };
+ var request = AzureHelper.RequestMessage("DELETE", deleteUrl, AccountName, AccountKey, additionalHeaders);
+ using (HttpResponseMessage response = AzureHelper.RequestWithRetry(Log, client, request).GetAwaiter().GetResult())
+ {
+ return response.IsSuccessStatusCode;
+ }
+ }
+ catch (Exception e)
+ {
+ Log.LogErrorFromException(e, true);
+ }
+ }
+
+ return !Log.HasLoggedErrors;
+ }
+
+ public static bool Execute(string accountName,
+ string accountKey,
+ string connectionString,
+ string containerName,
+ string blobName,
+ IBuildEngine buildengine,
+ ITaskHost taskHost)
+ {
+ DeleteAzureBlob deleteAzureoBlob = new DeleteAzureBlob()
+ {
+ AccountName = accountName,
+ AccountKey = accountKey,
+ ContainerName = containerName,
+ BlobName = blobName,
+ BuildEngine = buildengine,
+ HostObject = taskHost
+ };
+ return deleteAzureoBlob.Execute();
+ }
+ }
+}
diff --git a/tools-local/tasks/BuildTools.Publish/CloudTestTasks/DownloadFromAzure.cs b/tools-local/tasks/BuildTools.Publish/CloudTestTasks/DownloadFromAzure.cs
new file mode 100644
index 000000000..313d48392
--- /dev/null
+++ b/tools-local/tasks/BuildTools.Publish/CloudTestTasks/DownloadFromAzure.cs
@@ -0,0 +1,198 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+// See the LICENSE file in the project root for more information.
+
+using Microsoft.Build.Framework;
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Net.Http;
+using System.Threading;
+using System.Threading.Tasks;
+
+namespace Microsoft.DotNet.Build.CloudTestTasks
+{
+    public sealed class DownloadFromAzure : AzureConnectionStringBuildTask
+    {
+        /// <summary>
+        /// The name of the container to access. The specified name must be in the correct format, see the
+        /// following page for more info. https://msdn.microsoft.com/en-us/library/azure/dd135715.aspx
+        /// </summary>
+        [Required]
+        public string ContainerName { get; set; }
+
+        /// <summary>
+        /// Directory to download blob files to.
+        /// </summary>
+        [Required]
+        public string DownloadDirectory { get; set; }
+
+        public string BlobNamePrefix { get; set; }
+
+        public string BlobNameExtension { get; set; }
+
+        public ITaskItem[] BlobNames { get; set; }
+
+        public bool DownloadFlatFiles { get; set; }
+
+        public int MaxClients { get; set; } = 8;
+
+        private static readonly CancellationTokenSource TokenSource = new CancellationTokenSource();
+        private static readonly CancellationToken CancellationToken = TokenSource.Token;
+
+        public void Cancel()
+        {
+            TokenSource.Cancel();
+        }
+
+        public override bool Execute()
+        {
+            return ExecuteAsync(CancellationToken).GetAwaiter().GetResult();
+        }
+
+        public async Task<bool> ExecuteAsync(CancellationToken ct)
+        {
+            ParseConnectionString();
+            // If the connection string AND AccountKey & AccountName are provided, error out.
+            if (Log.HasLoggedErrors)
+            {
+                return false;
+            }
+
+            Log.LogMessage(MessageImportance.Normal, "Downloading contents of container {0} from storage account '{1}' to directory {2}.",
+                ContainerName, AccountName, DownloadDirectory);
+
+            try
+            {
+                List<string> blobNames = new List<string>();
+                if (BlobNames == null)
+                {
+                    ListAzureBlobs listAzureBlobs = new ListAzureBlobs()
+                    {
+                        AccountName = AccountName,
+                        AccountKey = AccountKey,
+                        ContainerName = ContainerName,
+                        FilterBlobNames = BlobNamePrefix,
+                        BuildEngine = this.BuildEngine,
+                        HostObject = this.HostObject
+                    };
+                    listAzureBlobs.Execute();
+                    blobNames = listAzureBlobs.BlobNames.ToList();
+                }
+                else
+                {
+                    blobNames = BlobNames.Select(b => b.ItemSpec).ToList();
+                    if (BlobNamePrefix != null)
+                    {
+                        blobNames = blobNames.Where(b => b.StartsWith(BlobNamePrefix)).ToList();
+                    }
+                }
+
+                if (BlobNameExtension != null)
+                {
+                    blobNames = blobNames.Where(b => Path.GetExtension(b) == BlobNameExtension).ToList();
+                }
+
+                if (!Directory.Exists(DownloadDirectory))
+                {
+                    Directory.CreateDirectory(DownloadDirectory);
+                }
+                using (var clientThrottle = new SemaphoreSlim(this.MaxClients, this.MaxClients))
+                using (HttpClient client = new HttpClient())
+                {
+                    client.Timeout = TimeSpan.FromMinutes(10);
+                    await Task.WhenAll(blobNames.Select(item => DownloadItem(client, ct, item, clientThrottle)));
+                }
+            }
+            catch (Exception e)
+            {
+                Log.LogError(e.ToString());
+            }
+            return !Log.HasLoggedErrors;
+        }
+
+        private async Task DownloadItem(HttpClient client, CancellationToken ct, string blob, SemaphoreSlim clientThrottle)
+        {
+            await clientThrottle.WaitAsync();
+            string filename = string.Empty;
+            try {
+                Log.LogMessage(MessageImportance.Low, "Downloading BLOB - {0}", blob);
+                string blobUrl = AzureHelper.GetBlobRestUrl(AccountName, ContainerName, blob);
+                filename = Path.Combine(DownloadDirectory, Path.GetFileName(blob));
+
+                if (!DownloadFlatFiles)
+                {
+                    int dirIndex = blob.LastIndexOf("/");
+                    string blobDirectory = string.Empty;
+                    string blobFilename = string.Empty;
+
+                    if (dirIndex == -1)
+                    {
+                        blobFilename = blob;
+                    }
+                    else
+                    {
+                        blobDirectory = blob.Substring(0, dirIndex);
+                        blobFilename = blob.Substring(dirIndex + 1);
+
+                        // Trim blob name prefix (directory part) from download to blob directory
+                        if (BlobNamePrefix != null)
+                        {
+                            if (BlobNamePrefix.Length > dirIndex)
+                            {
+                                BlobNamePrefix = BlobNamePrefix.Substring(0, dirIndex);
+                            }
+                            blobDirectory = blobDirectory.Substring(BlobNamePrefix.Length);
+                        }
+                    }
+                    string downloadBlobDirectory = Path.Combine(DownloadDirectory, blobDirectory);
+                    if (!Directory.Exists(downloadBlobDirectory))
+                    {
+                        Directory.CreateDirectory(downloadBlobDirectory);
+                    }
+                    filename = Path.Combine(downloadBlobDirectory, blobFilename);
+                }
+
+                var createRequest = AzureHelper.RequestMessage("GET", blobUrl, AccountName, AccountKey);
+
+                using (HttpResponseMessage response = await AzureHelper.RequestWithRetry(Log, client, createRequest))
+                {
+                    if (response.IsSuccessStatusCode)
+                    {
+                        // Blobs can be files but have the name of a directory. We'll skip those and log something weird happened.
+                        if (!string.IsNullOrEmpty(Path.GetFileName(filename)))
+                        {
+                            Stream responseStream = await response.Content.ReadAsStreamAsync();
+
+                            using (FileStream sourceStream = File.Open(filename, FileMode.Create))
+                            {
+                                responseStream.CopyTo(sourceStream);
+                            }
+                        }
+                        else
+                        {
+                            Log.LogWarning($"Unable to download blob '{blob}' as it has a directory-like name. This may cause problems if it was needed.");
+                        }
+                    }
+                    else
+                    {
+                        Log.LogError("Failed to retrieve blob {0}, the status code was {1}", blob, response.StatusCode);
+                    }
+                }
+            }
+            catch (PathTooLongException)
+            {
+                Log.LogError($"Unable to download blob as it exceeds the maximum allowed path length. Path: {filename}. Length: {filename.Length}");
+            }
+            catch (Exception ex)
+            {
+                Log.LogError(ex.ToString());
+            }
+            finally
+            {
+                clientThrottle.Release();
+            }
+        }
+    }
+}
diff --git a/tools-local/tasks/BuildTools.Publish/CloudTestTasks/ListAzureBlobs.cs b/tools-local/tasks/BuildTools.Publish/CloudTestTasks/ListAzureBlobs.cs
new file mode 100644
index 000000000..189229e90
--- /dev/null
+++ b/tools-local/tasks/BuildTools.Publish/CloudTestTasks/ListAzureBlobs.cs
@@ -0,0 +1,128 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+// See the LICENSE file in the project root for more information.
+
+using Microsoft.Build.Framework;
+using Microsoft.Build.Utilities;
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Net.Http;
+using System.Threading.Tasks;
+using System.Xml;
+
+namespace Microsoft.DotNet.Build.CloudTestTasks
+{
+    public partial class ListAzureBlobs : AzureConnectionStringBuildTask
+    {
+
+        /// <summary>
+        /// The name of the container to access. The specified name must be in the correct format, see the
+        /// following page for more info. https://msdn.microsoft.com/en-us/library/azure/dd135715.aspx
+        /// </summary>
+        [Required]
+        public string ContainerName { get; set; }
+
+        [Output]
+        public string[] BlobNames { get; set; }
+
+        public string FilterBlobNames { get; set; }
+
+        public override bool Execute()
+        {
+            return ExecuteAsync().GetAwaiter().GetResult();
+        }
+
+        public static string[] Execute(string accountName,
+            string accountKey,
+            string connectionString,
+            string containerName,
+            string filterBlobNames,
+            IBuildEngine buildengine,
+            ITaskHost taskHost)
+        {
+            ListAzureBlobs getAzureBlobList = new ListAzureBlobs()
+            {
+                AccountName = accountName,
+                AccountKey = accountKey,
+                ContainerName = containerName,
+                FilterBlobNames = filterBlobNames,
+                BuildEngine = buildengine,
+                HostObject = taskHost
+            };
+            getAzureBlobList.Execute();
+            return getAzureBlobList.BlobNames;
+        }
+
+        // This code is duplicated in BuildTools task DownloadFromAzure, and that code should be refactored to permit blob listing.
+        public async Task<bool> ExecuteAsync()
+        {
+            ParseConnectionString();
+            try
+            {
+                List<string> blobNames = await ListBlobs(Log, AccountName, AccountKey, ContainerName, FilterBlobNames);
+                BlobNames = blobNames.ToArray();
+            }
+            catch (Exception e)
+            {
+                Log.LogErrorFromException(e, true);
+            }
+            return !Log.HasLoggedErrors;
+        }
+
+        public static async Task<List<string>> ListBlobs(TaskLoggingHelper Log, string AccountName, string AccountKey, string ContainerName, string FilterBlobNames)
+        {
+            List<string> blobsNames = new List<string>();
+            string urlListBlobs = string.Format("https://{0}.blob.core.windows.net/{1}?restype=container&comp=list", AccountName, ContainerName);
+            if (!string.IsNullOrWhiteSpace(FilterBlobNames))
+            {
+                urlListBlobs += $"&prefix={FilterBlobNames}";
+            }
+            Log.LogMessage(MessageImportance.Low, "Sending request to list blobsNames for container '{0}'.", ContainerName);
+
+            using (HttpClient client = new HttpClient())
+            {
+
+                var createRequest = AzureHelper.RequestMessage("GET", urlListBlobs, AccountName, AccountKey);
+
+                XmlDocument responseFile;
+                string nextMarker = string.Empty;
+                using (HttpResponseMessage response = await AzureHelper.RequestWithRetry(Log, client, createRequest))
+                {
+                    responseFile = new XmlDocument();
+                    responseFile.LoadXml(await response.Content.ReadAsStringAsync());
+                    XmlNodeList elemList = responseFile.GetElementsByTagName("Name");
+
+                    blobsNames.AddRange(elemList.Cast<XmlNode>()
+                        .Select(x => x.InnerText)
+                        .ToList());
+
+                    nextMarker = responseFile.GetElementsByTagName("NextMarker").Cast<XmlNode>().FirstOrDefault()?.InnerText;
+                }
+                while (!string.IsNullOrEmpty(nextMarker))
+                {
+                    urlListBlobs = $"https://{AccountName}.blob.core.windows.net/{ContainerName}?restype=container&comp=list&marker={nextMarker}";
+                    if (!string.IsNullOrWhiteSpace(FilterBlobNames))
+                    {
+                        urlListBlobs += $"&prefix={FilterBlobNames}";
+                    }
+                    var nextRequest = AzureHelper.RequestMessage("GET", urlListBlobs, AccountName, AccountKey);
+                    using (HttpResponseMessage nextResponse = await AzureHelper.RequestWithRetry(Log, client, nextRequest))
+                    {
+                        responseFile = new XmlDocument();
+                        responseFile.LoadXml(await nextResponse.Content.ReadAsStringAsync());
+                        XmlNodeList elemList = responseFile.GetElementsByTagName("Name");
+
+                        blobsNames.AddRange(elemList.Cast<XmlNode>()
+                            .Select(x => x.InnerText)
+                            .ToList());
+
+                        nextMarker = responseFile.GetElementsByTagName("NextMarker").Cast<XmlNode>().FirstOrDefault()?.InnerText;
+                    }
+                }
+            }
+            return blobsNames;
+        }
+    }
+
+}
diff --git a/tools-local/tasks/BuildTools.Publish/CloudTestTasks/PublishStringToAzureBlob.cs b/tools-local/tasks/BuildTools.Publish/CloudTestTasks/PublishStringToAzureBlob.cs
new file mode 100644
index 000000000..873d93c57
--- /dev/null
+++ b/tools-local/tasks/BuildTools.Publish/CloudTestTasks/PublishStringToAzureBlob.cs
@@ -0,0 +1,76 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+// See the LICENSE file in the project root for more information.
+
+using Microsoft.Build.Framework;
+using System;
+using System.Collections.Generic;
+using System.Net.Http;
+
+namespace Microsoft.DotNet.Build.CloudTestTasks
+{
+    public partial class PublishStringToAzureBlob : AzureConnectionStringBuildTask
+    {
+        [Required]
+        public string BlobName { get; set; }
+        [Required]
+        public string ContainerName { get; set; }
+        [Required]
+        public string Content { get; set; }
+        public string ContentType { get; set; }
+
+        public override bool Execute()
+        {
+            ParseConnectionString();
+
+            string blobUrl = AzureHelper.GetBlobRestUrl(AccountName, ContainerName, BlobName);
+            using (HttpClient client = new HttpClient())
+            {
+                try
+                {
+                    Tuple<string, string> headerBlobType = new Tuple<string, string>("x-ms-blob-type", "BlockBlob");
+                    List<Tuple<string, string>> additionalHeaders = new List<Tuple<string, string>>() { headerBlobType };
+
+                    if (!string.IsNullOrEmpty(ContentType))
+                    {
+                        additionalHeaders.Add(new Tuple<string, string>(AzureHelper.ContentTypeString, ContentType));
+                    }
+
+                    var request = AzureHelper.RequestMessage("PUT", blobUrl, AccountName, AccountKey, additionalHeaders, Content);
+
+                    AzureHelper.RequestWithRetry(Log, client, request).GetAwaiter().GetResult();
+                }
+                catch (Exception e)
+                {
+                    Log.LogErrorFromException(e, true);
+                }
+            }
+
+            return !Log.HasLoggedErrors;
+        }
+
+        public static bool Execute(string accountName,
+            string accountKey,
+            string connectionString,
+            string containerName,
+            string blobName,
+            string content,
+            string contentType,
+            IBuildEngine buildengine,
+            ITaskHost taskHost)
+        {
+            PublishStringToAzureBlob publishStringToBlob = new PublishStringToAzureBlob()
+            {
+                AccountName = accountName,
+                AccountKey = accountKey,
+                ContainerName = containerName,
+                BlobName = blobName,
+                Content = content,
+                ContentType = contentType,
+                BuildEngine = buildengine,
+                HostObject = taskHost
+            };
+            return publishStringToBlob.Execute();
+        }
+    }
+}
diff --git a/tools-local/tasks/BuildTools.Publish/CloudTestTasks/UploadClient.cs b/tools-local/tasks/BuildTools.Publish/CloudTestTasks/UploadClient.cs
new file mode 100644
index 000000000..ce31e8d78
--- /dev/null
+++ b/tools-local/tasks/BuildTools.Publish/CloudTestTasks/UploadClient.cs
@@ -0,0 +1,285 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+// See the LICENSE file in the project root for more information.
+
+using Microsoft.Build.Framework;
+using Microsoft.Build.Utilities;
+using System;
+using System.Collections.Generic;
+using System.Globalization;
+using System.IO;
+using System.Linq;
+using System.Net;
+using System.Net.Http;
+using System.Text;
+using System.Threading;
+using System.Threading.Tasks;
+using Task = System.Threading.Tasks.Task;
+
+namespace Microsoft.DotNet.Build.CloudTestTasks
+{
+    public class UploadClient
+    {
+        private TaskLoggingHelper log;
+
+        public UploadClient(TaskLoggingHelper loggingHelper)
+        {
+            log = loggingHelper;
+        }
+
+        public string EncodeBlockIds(int numberOfBlocks, int lengthOfId)
+        {
+            string numberOfBlocksString = numberOfBlocks.ToString("D" + lengthOfId);
+            if (Encoding.UTF8.GetByteCount(numberOfBlocksString) <= 64)
+            {
+                byte[] bytes = Encoding.UTF8.GetBytes(numberOfBlocksString);
+                return Convert.ToBase64String(bytes);
+            }
+            else
+            {
+                throw new Exception("Task failed - Could not encode block id.");
+            }
+        }
+
+        public async Task UploadBlockBlobAsync(
+            CancellationToken ct,
+            string AccountName,
+            string AccountKey,
+            string ContainerName,
+            string filePath,
+            string destinationBlob,
+            string contentType,
+            int uploadTimeout,
+            string leaseId = "")
+        {
+            string resourceUrl = AzureHelper.GetContainerRestUrl(AccountName, ContainerName);
+
+            string fileName = destinationBlob;
+            fileName = fileName.Replace("\\", "/");
+            string blobUploadUrl = resourceUrl + "/" + fileName;
+            int size = (int)new FileInfo(filePath).Length;
+            int blockSize = 4 * 1024 * 1024; //4MB max size of a block blob
+            int bytesLeft = size;
+            List<string> blockIds = new List<string>();
+            int numberOfBlocks = (size / blockSize) + 1;
+            int countForId = 0;
+            using (FileStream fileStreamTofilePath = new FileStream(filePath, FileMode.Open, FileAccess.Read))
+            {
+                int offset = 0;
+
+                while (bytesLeft > 0)
+                {
+                    int nextBytesToRead = (bytesLeft < blockSize) ? bytesLeft : blockSize;
+                    byte[] fileBytes = new byte[blockSize];
+                    int read = fileStreamTofilePath.Read(fileBytes, 0, nextBytesToRead);
+
+                    if (nextBytesToRead != read)
+                    {
+                        throw new Exception(string.Format(
+                            "Number of bytes read ({0}) from file {1} isn't equal to the number of bytes expected ({2}) .",
+                            read, fileName, nextBytesToRead));
+                    }
+
+                    string blockId = EncodeBlockIds(countForId, numberOfBlocks.ToString().Length);
+
+                    blockIds.Add(blockId);
+                    string blockUploadUrl = blobUploadUrl + "?comp=block&blockid=" + WebUtility.UrlEncode(blockId);
+
+                    using (HttpClient client = new HttpClient())
+                    {
+                        client.DefaultRequestHeaders.Clear();
+
+                        // In random occasions the request fails if the network is slow and it takes more than 100 seconds to upload 4MB.
+                        client.Timeout = TimeSpan.FromMinutes(uploadTimeout);
+                        Func<HttpRequestMessage> createRequest = () =>
+                        {
+                            DateTime dt = DateTime.UtcNow;
+                            var req = new HttpRequestMessage(HttpMethod.Put, blockUploadUrl);
+                            req.Headers.Add(
+                                AzureHelper.DateHeaderString,
+                                dt.ToString("R", CultureInfo.InvariantCulture));
+                            req.Headers.Add(AzureHelper.VersionHeaderString, AzureHelper.StorageApiVersion);
+                            if (!string.IsNullOrWhiteSpace(leaseId))
+                            {
+                                log.LogMessage($"Sending request: {leaseId} {blockUploadUrl}");
+                                req.Headers.Add("x-ms-lease-id", leaseId);
+                            }
+                            req.Headers.Add(
+                                AzureHelper.AuthorizationHeaderString,
+                                AzureHelper.AuthorizationHeader(
+                                    AccountName,
+                                    AccountKey,
+                                    "PUT",
+                                    dt,
+                                    req,
+                                    string.Empty,
+                                    string.Empty,
+                                    nextBytesToRead.ToString(),
+                                    string.Empty));
+
+                            Stream postStream = new MemoryStream();
+                            postStream.Write(fileBytes, 0, nextBytesToRead);
+                            postStream.Seek(0, SeekOrigin.Begin);
+                            req.Content = new StreamContent(postStream);
+                            return req;
+                        };
+
+                        log.LogMessage(MessageImportance.Low, "Sending request to upload part {0} of file {1}", countForId, fileName);
+
+                        using (HttpResponseMessage response = await AzureHelper.RequestWithRetry(log, client, createRequest))
+                        {
+                            log.LogMessage(
+                                MessageImportance.Low,
+                                "Received response to upload part {0} of file {1}: Status Code:{2} Status Desc: {3}",
+                                countForId,
+                                fileName,
+                                response.StatusCode,
+                                await response.Content.ReadAsStringAsync());
+                        }
+                    }
+
+                    offset += read;
+                    bytesLeft -= nextBytesToRead;
+                    countForId += 1;
+                }
+            }
+
+            string blockListUploadUrl = blobUploadUrl + "?comp=blocklist";
+
+            using (HttpClient client = new HttpClient())
+            {
+                Func<HttpRequestMessage> createRequest = () =>
+                {
+                    DateTime dt1 = DateTime.UtcNow;
+                    var req = new HttpRequestMessage(HttpMethod.Put, blockListUploadUrl);
+                    req.Headers.Add(AzureHelper.DateHeaderString, dt1.ToString("R", CultureInfo.InvariantCulture));
+                    req.Headers.Add(AzureHelper.VersionHeaderString, AzureHelper.StorageApiVersion);
+                    if (string.IsNullOrEmpty(contentType))
+                    {
+                        contentType = DetermineContentTypeBasedOnFileExtension(filePath);
+                    }
+                    if (!string.IsNullOrEmpty(contentType))
+                    {
+                        req.Headers.Add(AzureHelper.ContentTypeString, contentType);
+                    }
+                    string cacheControl = DetermineCacheControlBasedOnFileExtension(filePath);
+                    if (!string.IsNullOrEmpty(cacheControl))
+                    {
+                        req.Headers.Add(AzureHelper.CacheControlString, cacheControl);
+                    }
+
+                    var body = new StringBuilder("<?xml version=\"1.0\" encoding=\"utf-8\"?><BlockList>");
+                    foreach (object item in blockIds)
+                        body.AppendFormat("<Latest>{0}</Latest>", item);
+
+                    body.Append("</BlockList>");
+                    byte[] bodyData = Encoding.UTF8.GetBytes(body.ToString());
+                    if (!string.IsNullOrWhiteSpace(leaseId))
+                    {
+                        log.LogMessage($"Sending list request: {leaseId} {blockListUploadUrl}");
+                        req.Headers.Add("x-ms-lease-id", leaseId);
+                    }
+                    req.Headers.Add(
+                        AzureHelper.AuthorizationHeaderString,
+                        AzureHelper.AuthorizationHeader(
+                            AccountName,
+                            AccountKey,
+                            "PUT",
+                            dt1,
+                            req,
+                            string.Empty,
+                            string.Empty,
+                            bodyData.Length.ToString(),
+                            string.Empty));
+
+                    Stream postStream = new MemoryStream();
+                    postStream.Write(bodyData, 0, bodyData.Length);
+                    postStream.Seek(0, SeekOrigin.Begin);
+                    req.Content = new StreamContent(postStream);
+                    return req;
+                };
+
+                using (HttpResponseMessage response = await AzureHelper.RequestWithRetry(log, client, createRequest))
+                {
+                    log.LogMessage(
+                        MessageImportance.Low,
+                        "Received response to combine block list for file {0}: Status Code:{1} Status Desc: {2}",
+                        fileName,
+                        response.StatusCode,
+                        await response.Content.ReadAsStringAsync());
+                }
+            }
+        }
+
+        public async Task<bool> FileEqualsExistingBlobAsync(
+            string accountName,
+            string accountKey,
+            string containerName,
+            string filePath,
+            string destinationBlob,
+            int uploadTimeout)
+        {
+            using (var client = new HttpClient
+            {
+                Timeout = TimeSpan.FromMinutes(uploadTimeout)
+            })
+            {
+                log.LogMessage(
+                    MessageImportance.Low,
+                    $"Downloading blob {destinationBlob} to check if identical.");
+
+                string blobUrl = AzureHelper.GetBlobRestUrl(accountName, containerName, destinationBlob);
+                var createRequest = AzureHelper.RequestMessage("GET", blobUrl, accountName, accountKey);
+
+                using (HttpResponseMessage response = await AzureHelper.RequestWithRetry(
+                    log,
+                    client,
+                    createRequest))
+                {
+                    if (!response.IsSuccessStatusCode)
+                    {
+                        throw new HttpRequestException(
+                            $"Failed to retrieve existing blob {destinationBlob}, " +
+                            $"status code {response.StatusCode}.");
+                    }
+
+                    byte[] existingBytes = await response.Content.ReadAsByteArrayAsync();
+                    byte[] localBytes = File.ReadAllBytes(filePath);
+
+                    bool equal = localBytes.SequenceEqual(existingBytes);
+
+                    if (equal)
+                    {
+                        log.LogMessage(
+                            MessageImportance.Normal,
+                            "Item exists in blob storage, and is verified to be identical. " +
+                            $"File: '{filePath}' Blob: '{destinationBlob}'");
+                    }
+
+                    return equal;
+                }
+            }
+        }
+
+        private string DetermineContentTypeBasedOnFileExtension(string filename)
+        {
+            if (Path.GetExtension(filename) == ".svg")
+            {
+                return "image/svg+xml";
+            }
+            else if (Path.GetExtension(filename) == ".version")
+            {
+                return "text/plain";
+            }
+            return string.Empty;
+        }
+        private string DetermineCacheControlBasedOnFileExtension(string filename)
+        {
+            if (Path.GetExtension(filename) == ".svg")
+            {
+                return "No-Cache";
+            }
+            return string.Empty;
+        }
+    }
+}
diff --git a/tools-local/tasks/BuildTools.Publish/CloudTestTasks/UploadToAzure.cs b/tools-local/tasks/BuildTools.Publish/CloudTestTasks/UploadToAzure.cs
new file mode 100644
index 000000000..f815dda63
--- /dev/null
+++ b/tools-local/tasks/BuildTools.Publish/CloudTestTasks/UploadToAzure.cs
@@ -0,0 +1,208 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+// See the LICENSE file in the project root for more information.
+
+using Microsoft.Build.Framework;
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Net.Http;
+using System.Threading;
+using System.Threading.Tasks;
+using System.Xml;
+using ThreadingTask = System.Threading.Tasks.Task;
+
+namespace Microsoft.DotNet.Build.CloudTestTasks
+{
+
+    public class UploadToAzure : AzureConnectionStringBuildTask, ICancelableTask
+    {
+        private static readonly CancellationTokenSource TokenSource = new CancellationTokenSource();
+        private static readonly CancellationToken CancellationToken = TokenSource.Token;
+
+        /// <summary>
+        /// The name of the container to access. The specified name must be in the correct format, see the
+        /// following page for more info. https://msdn.microsoft.com/en-us/library/azure/dd135715.aspx
+        /// </summary>
+        [Required]
+        public string ContainerName { get; set; }
+
+        /// <summary>
+        /// An item group of files to upload. Each item must have metadata RelativeBlobPath
+        /// that specifies the path relative to ContainerName where the item will be uploaded.
+        /// </summary>
+        [Required]
+        public ITaskItem[] Items { get; set; }
+
+        /// <summary>
+        /// Indicates if the destination blob should be overwritten if it already exists. The default is false.
+        /// </summary>
+        public bool Overwrite { get; set; } = false;
+
+        /// <summary>
+        /// Enables idempotency when Overwrite is false.
+        ///
+        /// false: (default) Attempting to upload an item that already exists fails.
+        ///
+        /// true: When an item already exists, download the existing blob to check if it's
+        /// byte-for-byte identical to the one being uploaded. If so, pass. If not, fail.
+        /// </summary>
+        public bool PassIfExistingItemIdentical { get; set; }
+
+        /// <summary>
+        /// Specifies the maximum number of clients to concurrently upload blobs to azure
+        /// </summary>
+        public int MaxClients { get; set; } = 8;
+
+        public int UploadTimeoutInMinutes { get; set; } = 5;
+
+        public void Cancel()
+        {
+            TokenSource.Cancel();
+        }
+
+        public override bool Execute()
+        {
+            return ExecuteAsync(CancellationToken).GetAwaiter().GetResult();
+        }
+
+        public async Task<bool> ExecuteAsync(CancellationToken ct)
+        {
+            ParseConnectionString();
+            // If the connection string AND AccountKey & AccountName are provided, error out.
+            if (Log.HasLoggedErrors)
+            {
+                return false;
+            }
+
+            Log.LogMessage(
+                MessageImportance.Normal,
+                "Begin uploading blobs to Azure account {0} in container {1}.",
+                AccountName,
+                ContainerName);
+
+            if (Items.Length == 0)
+            {
+                Log.LogError("No items were provided for upload.");
+                return false;
+            }
+
+            // first check what blobs are present
+            string checkListUrl = $"{AzureHelper.GetContainerRestUrl(AccountName, ContainerName)}?restype=container&comp=list";
+
+            HashSet<string> blobsPresent = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
+
+            try
+            {
+                using (HttpClient client = new HttpClient())
+                {
+                    var createRequest = AzureHelper.RequestMessage("GET", checkListUrl, AccountName, AccountKey);
+
+                    Log.LogMessage(MessageImportance.Low, "Sending request to check whether Container blobs exist");
+                    using (HttpResponseMessage response = await AzureHelper.RequestWithRetry(Log, client, createRequest))
+                    {
+                        var doc = new XmlDocument();
+                        doc.LoadXml(await response.Content.ReadAsStringAsync());
+
+                        XmlNodeList nodes = doc.DocumentElement.GetElementsByTagName("Blob");
+
+                        foreach (XmlNode node in nodes)
+                        {
+                            blobsPresent.Add(node["Name"].InnerText);
+                        }
+
+                        Log.LogMessage(MessageImportance.Low, "Received response to check whether Container blobs exist");
+                    }
+                }
+
+                using (var clientThrottle = new SemaphoreSlim(this.MaxClients, this.MaxClients))
+                {
+                    await ThreadingTask.WhenAll(Items.Select(item => UploadAsync(ct, item, blobsPresent, clientThrottle)));
+                }
+
+                Log.LogMessage(MessageImportance.Normal, "Upload to Azure is complete, a total of {0} items were uploaded.", Items.Length);
+            }
+            catch (Exception e)
+            {
+                Log.LogErrorFromException(e, true);
+            }
+            return !Log.HasLoggedErrors;
+        }
+
+        private async ThreadingTask UploadAsync(CancellationToken ct, ITaskItem item, HashSet<string> blobsPresent, SemaphoreSlim clientThrottle)
+        {
+            if (ct.IsCancellationRequested)
+            {
+                Log.LogError("Task UploadToAzure cancelled");
+                ct.ThrowIfCancellationRequested();
+            }
+
+            string relativeBlobPath = item.GetMetadata("RelativeBlobPath");
+            if (string.IsNullOrEmpty(relativeBlobPath))
+                throw new Exception(string.Format("Metadata 'RelativeBlobPath' is missing for item '{0}'.", item.ItemSpec));
+
+            if (!File.Exists(item.ItemSpec))
+                throw new Exception(string.Format("The file '{0}' does not exist.", item.ItemSpec));
+
+            UploadClient uploadClient = new UploadClient(Log);
+
+            if (!Overwrite && blobsPresent.Contains(relativeBlobPath))
+            {
+                if (PassIfExistingItemIdentical &&
+                    await ItemEqualsExistingBlobAsync(item, relativeBlobPath, uploadClient, clientThrottle))
+                {
+                    return;
+                }
+
+                throw new Exception(string.Format("The blob '{0}' already exists.", relativeBlobPath));
+            }
+
+            string contentType = item.GetMetadata("ContentType");
+
+            await clientThrottle.WaitAsync();
+
+            try
+            {
+                Log.LogMessage("Uploading {0} to {1}.", item.ItemSpec, ContainerName);
+                await
+                    uploadClient.UploadBlockBlobAsync(
+                        ct,
+                        AccountName,
+                        AccountKey,
+                        ContainerName,
+                        item.ItemSpec,
+                        relativeBlobPath,
+                        contentType,
+                        UploadTimeoutInMinutes);
+            }
+            finally
+            {
+                clientThrottle.Release();
+            }
+        }
+
+        private async Task<bool> ItemEqualsExistingBlobAsync(
+            ITaskItem item,
+            string relativeBlobPath,
+            UploadClient client,
+            SemaphoreSlim clientThrottle)
+        {
+            await clientThrottle.WaitAsync();
+            try
+            {
+                return await client.FileEqualsExistingBlobAsync(
+                    AccountName,
+                    AccountKey,
+                    ContainerName,
+                    item.ItemSpec,
+                    relativeBlobPath,
+                    UploadTimeoutInMinutes);
+            }
+            finally
+            {
+                clientThrottle.Release();
+            }
+        }
+    }
+}
diff --git a/tools-local/tasks/BuildTools.Publish/README.md b/tools-local/tasks/BuildTools.Publish/README.md
new file mode 100644
index 000000000..24fb050f7
--- /dev/null
+++ b/tools-local/tasks/BuildTools.Publish/README.md
@@ -0,0 +1,11 @@
+The files in this directory are the closure of C# code from the BuildTools repo
+that's necessary for the publish tasks. There are no changes except
+automatically removing and sorting the using statements.
+
+Source: https://github.com/dotnet/buildtools/tree/55d43483866c7caeeace96355add3a9b12fa5795
+
+Using the existing BuildTools code reduces the risk of behavior differences vs.
+trying to find equivalents in Arcade. The upcoming new Arcade-powered publish
+functionality makes short-term effort to deduplicate these tasks throwaway work.
+
+See [core-setup/#7118 "Use Arcade publish functionality"](https://github.com/dotnet/core-setup/issues/7118)
diff --git a/tools-local/tasks/BuildTools.Publish/Tasks/GenerateChecksums.cs b/tools-local/tasks/BuildTools.Publish/Tasks/GenerateChecksums.cs
new file mode 100644
index 000000000..e662bb076
--- /dev/null
+++ b/tools-local/tasks/BuildTools.Publish/Tasks/GenerateChecksums.cs
@@ -0,0 +1,67 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+// See the LICENSE file in the project root for more information.
+
+using Microsoft.Build.Framework;
+using System;
+using System.IO;
+using System.Security.Cryptography;
+
+namespace Microsoft.DotNet.Build.Tasks
+{
+    public class GenerateChecksums : BuildTask
+    {
+        /// <summary>
+        /// An item collection of files for which to generate checksums. Each item must have metadata
+        /// 'DestinationPath' that specifies the path of the checksum file to create.
+        /// </summary>
+        [Required]
+        public ITaskItem[] Items { get; set; }
+
+        public override bool Execute()
+        {
+            foreach (ITaskItem item in Items)
+            {
+                try
+                {
+                    string destinationPath = item.GetMetadata("DestinationPath");
+                    if (string.IsNullOrEmpty(destinationPath))
+                    {
+                        throw new Exception($"Metadata 'DestinationPath' is missing for item '{item.ItemSpec}'.");
+                    }
+
+                    if (!File.Exists(item.ItemSpec))
+                    {
+                        throw new Exception($"The file '{item.ItemSpec}' does not exist.");
+                    }
+
+                    Log.LogMessage(
+                        MessageImportance.High,
+                        "Generating checksum for '{0}' into '{1}'...",
+                        item.ItemSpec,
+                        destinationPath);
+
+                    using (FileStream stream = File.OpenRead(item.ItemSpec))
+                    {
+                        using (HashAlgorithm hashAlgorithm = SHA512.Create())
+                        {
+                            byte[] hash = hashAlgorithm.ComputeHash(stream);
+                            string checksum = BitConverter.ToString(hash).Replace("-", string.Empty);
+                            File.WriteAllText(destinationPath, checksum);
+                        }
+                    }
+                }
+                catch (Exception e)
+                {
+                    // We have 2 log calls because we want a nice error message but we also want to capture the
+                    // callstack in the log.
+                    Log.LogError("An exception occurred while trying to generate a checksum for '{0}'.", item.ItemSpec);
+                    Log.LogMessage(MessageImportance.Low, e.ToString());
+                    return false;
+                }
+            }
+
+            return true;
+        }
+    }
+}
diff --git a/tools-local/tasks/FinalizeBuild.cs b/tools-local/tasks/FinalizeBuild.cs
new file mode 100644
index 000000000..df14cafb5
--- /dev/null
+++ b/tools-local/tasks/FinalizeBuild.cs
@@ -0,0 +1,203 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using Microsoft.Build.Framework;
using Microsoft.DotNet.Build.CloudTestTasks;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.RegularExpressions;
using System.Threading.Tasks;

namespace Microsoft.DotNet.Build.Tasks
{
    /// <summary>
    /// Finalizes a build by copying its product blobs into the channel's "latest" location in
    /// Azure blob storage and publishing a latest.version file. A leased semaphore blob
    /// serializes finalization across concurrent builds, and a per-version hint blob prevents
    /// re-publishing a version that has already been finalized.
    /// </summary>
    public class FinalizeBuild : AzureConnectionStringBuildTask
    {
        /// <summary>Blob leased to serialize finalization across builds.</summary>
        [Required]
        public string SemaphoreBlob { get; set; }

        /// <summary>Container path under which per-version hint blobs are dropped.</summary>
        [Required]
        public string FinalizeContainer { get; set; }

        public string MaxWait { get; set; }
        public string Delay { get; set; }

        [Required]
        public string ContainerName { get; set; }
        [Required]
        public string ProductBlobStorageName { get; set; }
        [Required]
        public string Channel { get; set; }
        [Required]
        public string SharedFrameworkNugetVersion { get; set; }
        [Required]
        public string ProductVersion { get; set; }
        [Required]
        public string Version { get; set; }
        [Required]
        public string CommitHash { get; set; }

        /// <summary>When true, publish even if this version's hint blob already exists.</summary>
        public bool ForcePublish { get; set; }

        // Matches version strings such as '3.0.0' or '3.0.0-preview-27122-01'; used only via
        // Replace() to recognize old version-hint blobs, so the group names do not affect behavior.
        // NOTE(review): group names reconstructed from a markup-stripped source - confirm against
        // the original file.
        private Regex _versionRegex = new Regex(@"(?<version>\d+\.\d+\.\d+)(-(?<prerelease>[^-]+-)?(?<major>\d+)-(?<minor>\d+))?");

        public override bool Execute()
        {
            ParseConnectionString();

            if (Log.HasLoggedErrors)
            {
                return false;
            }

            if (!FinalizeContainer.EndsWith("/"))
            {
                FinalizeContainer = $"{FinalizeContainer}/";
            }
            string targetVersionFile = $"{FinalizeContainer}{Version}";

            CreateBlobIfNotExists(SemaphoreBlob);

            AzureBlobLease blobLease = new AzureBlobLease(AccountName, AccountKey, ConnectionString, ContainerName, SemaphoreBlob, Log);
            Log.LogMessage($"Acquiring lease on semaphore blob '{SemaphoreBlob}'");
            blobLease.Acquire();

            // Prevent race conditions by dropping a version hint of what version this is. If we see this file
            // and it is the same as our version then we know that a race happened where two+ builds finished
            // at the same time and someone already took care of publishing and we have no work to do.
            if (IsLatestSpecifiedVersion(targetVersionFile) && !ForcePublish)
            {
                Log.LogMessage(MessageImportance.Low, $"version hint file for publishing finalization is {targetVersionFile}");
                Log.LogMessage(MessageImportance.High, $"Version '{Version}' is already published, skipping finalization.");
                Log.LogMessage($"Releasing lease on semaphore blob '{SemaphoreBlob}'");
                blobLease.Release();
                return true;
            }
            else
            {
                // Delete old version files
                GetBlobList(FinalizeContainer)
                    .Select(s => s.Replace("/dotnet/", ""))
                    .Where(w => _versionRegex.Replace(Path.GetFileName(w), "") == "")
                    .ToList()
                    .ForEach(f => TryDeleteBlob(f));

                // Drop the version file signaling such for any race-condition builds (see above comment).
                CreateBlobIfNotExists(targetVersionFile);

                try
                {
                    // Surface copy failures instead of silently ignoring the returned status.
                    if (!CopyBlobs(
                        $"{ProductBlobStorageName}/{ProductVersion}/",
                        $"{ProductBlobStorageName}/{Channel}/"))
                    {
                        Log.LogError("Failed to copy one or more product blobs to the channel folder.");
                    }

                    // Generate the latest version text file
                    string sfxVersion = GetSharedFrameworkVersionFileContent();
                    PublishStringToBlob(
                        ContainerName,
                        $"{ProductBlobStorageName}/{Channel}/latest.version",
                        sfxVersion,
                        "text/plain");
                }
                finally
                {
                    // Always release the semaphore lease, even when publishing throws.
                    blobLease.Release();
                }
            }
            return !Log.HasLoggedErrors;
        }

        /// <summary>
        /// Builds the latest.version file content: commit hash on the first line, shared
        /// framework NuGet version on the second.
        /// </summary>
        private string GetSharedFrameworkVersionFileContent()
        {
            string returnString = $"{CommitHash}{Environment.NewLine}";
            returnString += $"{SharedFrameworkNugetVersion}{Environment.NewLine}";
            return returnString;
        }

        /// <summary>
        /// Copies every blob under <paramref name="sourceFolder"/> to
        /// <paramref name="destinationFolder"/>, replacing the shared framework version in each
        /// file name with 'latest'. Copies run in parallel.
        /// </summary>
        /// <returns>True only if every copy succeeded.</returns>
        public bool CopyBlobs(string sourceFolder, string destinationFolder)
        {
            bool returnStatus = true;
            List<Task<bool>> copyTasks = new List<Task<bool>>();
            string[] blobs = GetBlobList(sourceFolder);
            foreach (string blob in blobs)
            {
                string targetName = Path.GetFileName(blob)
                    .Replace(SharedFrameworkNugetVersion, "latest");
                // Blob list entries are container-rooted; strip the container prefix.
                string sourceBlob = blob.Replace($"/{ContainerName}/", "");
                string destinationBlob = $"{destinationFolder}{targetName}";
                Log.LogMessage($"Copying blob '{sourceBlob}' to '{destinationBlob}'");
                copyTasks.Add(CopyBlobAsync(sourceBlob, destinationBlob));
            }
            Task.WaitAll(copyTasks.ToArray());
            copyTasks.ForEach(c => returnStatus &= c.Result);
            return returnStatus;
        }

        public bool TryDeleteBlob(string path)
        {
            return DeleteBlob(ContainerName, path);
        }

        /// <summary>Creates a placeholder blob at <paramref name="path"/> if none exists.</summary>
        public void CreateBlobIfNotExists(string path)
        {
            var blobList = GetBlobList(path);
            if (blobList.Count() == 0)
            {
                PublishStringToBlob(ContainerName, path, DateTime.Now.ToString());
            }
        }

        /// <summary>True when the version hint blob for this version already exists.</summary>
        public bool IsLatestSpecifiedVersion(string versionFile)
        {
            var blobList = GetBlobList(versionFile);
            return blobList.Count() != 0;
        }

        public bool DeleteBlob(string container, string blob)
        {
            return DeleteAzureBlob.Execute(AccountName,
                                           AccountKey,
                                           ConnectionString,
                                           container,
                                           blob,
                                           BuildEngine,
                                           HostObject);
        }

        public Task<bool> CopyBlobAsync(string sourceBlobName, string destinationBlobName)
        {
            return CopyAzureBlobToBlob.ExecuteAsync(AccountName,
                                                    AccountKey,
                                                    ConnectionString,
                                                    ContainerName,
                                                    sourceBlobName,
                                                    destinationBlobName,
                                                    BuildEngine,
                                                    HostObject);
        }

        public string[] GetBlobList(string path)
        {
            return ListAzureBlobs.Execute(AccountName,
                                          AccountKey,
                                          ConnectionString,
                                          ContainerName,
                                          path,
                                          BuildEngine,
                                          HostObject);
        }

        public bool PublishStringToBlob(string container, string blob, string contents, string contentType = null)
        {
            return PublishStringToAzureBlob.Execute(AccountName,
                                                    AccountKey,
                                                    ConnectionString,
                                                    container,
                                                    blob,
                                                    contents,
                                                    contentType,
                                                    BuildEngine,
                                                    HostObject);
        }
    }
}
diff --git a/tools-local/tasks/GetTargetMachineInfo.cs b/tools-local/tasks/GetTargetMachineInfo.cs
new file mode 100644
index 000000000..67aa53b36
--- /dev/null
+++ b/tools-local/tasks/GetTargetMachineInfo.cs
@@ -0,0 +1,67 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using Microsoft.Build.Framework;
using System.Runtime.InteropServices;

namespace Microsoft.DotNet.Build.Tasks
{
    /// <summary>
    /// Detects the operating system, processor architecture, and runtime identifier of the
    /// machine the task runs on and exposes them as MSBuild output properties.
    /// </summary>
    public class GetTargetMachineInfo : BuildTask
    {
        // OS name in build-system convention (e.g. "Windows_NT", "Linux"); null when unrecognized.
        [Output]
        public string TargetOS { get; set; }

        // Architecture moniker (e.g. "x64", "arm64"); null when unrecognized.
        [Output]
        public string TargetArch { get; set; }

        // Runtime identifier reported by Microsoft.DotNet.PlatformAbstractions.
        [Output]
        public string RuntimeIdentifier { get; set; }

        /// <summary>
        /// Populates the output properties; fails with a logged error when either the
        /// architecture or the operating system could not be identified.
        /// </summary>
        public override bool Execute()
        {
            TargetArch = GetArchitectureName(RuntimeInformation.OSArchitecture);
            TargetOS = GetOperatingSystemName();

            RuntimeIdentifier = Microsoft.DotNet.PlatformAbstractions.RuntimeEnvironment.GetRuntimeIdentifier();

            if (TargetArch == null)
            {
                Log.LogError("{0} is null", nameof(TargetArch));
                return false;
            }

            if (TargetOS == null)
            {
                Log.LogError("{0} is null", nameof(TargetOS));
                return false;
            }

            return true;
        }

        // Maps the detected processor architecture to its moniker; null for anything else.
        private static string GetArchitectureName(Architecture architecture)
        {
            if (architecture == Architecture.X64)
            {
                return "x64";
            }
            if (architecture == Architecture.X86)
            {
                return "x86";
            }
            if (architecture == Architecture.Arm)
            {
                return "arm";
            }
            if (architecture == Architecture.Arm64)
            {
                return "arm64";
            }
            return null;
        }

        // Maps the current OS platform to the build system's OS name; null for anything else.
        private static string GetOperatingSystemName()
        {
            if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
            {
                return "Windows_NT";
            }
            if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
            {
                return "Linux";
            }
            if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX))
            {
                return "OSX";
            }
            if (RuntimeInformation.IsOSPlatform(OSPlatform.Create("FREEBSD")))
            {
                return "FreeBSD";
            }
            if (RuntimeInformation.IsOSPlatform(OSPlatform.Create("NETBSD")))
            {
                return "NetBSD";
            }
            return null;
        }
    }
}
diff --git a/tools-local/tasks/RegenerateReadmeTable.cs b/tools-local/tasks/RegenerateReadmeTable.cs
new file mode 100644
index 000000000..57664a298
--- /dev/null
+++ b/tools-local/tasks/RegenerateReadmeTable.cs
@@ -0,0 +1,197 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using Microsoft.Build.Framework;
using System;
using System.IO;
using System.Linq;
using System.Text;

namespace Microsoft.DotNet.Build.Tasks
{
    /// <summary>
    /// Regenerates the Markdown link table in a readme file from the reference-style links
    /// the readme already defines.
    /// </summary>
    public class RegenerateReadmeTable : BuildTask
    {
        private const string TableComment = "generated table";
        private const string LinksComment = "links to include in table";

        /// <summary>
        /// A readme file that contains a Markdown table and a list of links. This task reads the
        /// "links to include in table" section to find available links, then updates the
        /// "generated table" section to include a Markdown table. Cells in the table are generated
        /// by looking for links that apply to the current combination of platform and branch.
        ///
        /// The sections are marked by one-line html comments:
        ///
        /// <!-- BEGIN section name -->
        /// ...
        /// <!-- END section name -->
        /// </summary>
        [Required]
        public string ReadmeFile { get; set; }

        /// <summary>
        /// %(Identity): Name of this branch, as appears in the column header.
        /// %(Abbr): Abbreviation of this branch, used to match up with link names.
        /// </summary>
        [Required]
        public ITaskItem[] Branches { get; set; }

        /// <summary>
        /// %(Identity): Name of this platform, as appears in bold as the first column of the row.
        /// %(Parenthetical): An extra non-bold string to add after the platform name.
        /// %(Abbr): Abbreviation of this platform, used to match up with link names.
        /// </summary>
        [Required]
        public ITaskItem[] Platforms { get; set; }

        // One-line HTML comments that delimit a named section in the readme.
        // NOTE(review): these literals were reconstructed from a markup-stripped source; the
        // rest of the task depends on them matching the markers used in the readme - confirm.
        private string Begin(string marker) => $"<!-- BEGIN {marker} -->";
        private string End(string marker) => $"<!-- END {marker} -->";

        /// <summary>
        /// Reads the readme, collects link names from the links section, and rewrites the
        /// generated-table section in place. Logs an error (and changes nothing) when either
        /// marker section is missing.
        /// </summary>
        public override bool Execute()
        {
            string[] readmeLines = File.ReadAllLines(ReadmeFile);

            if (readmeLines.Contains(Begin(LinksComment)) &&
                readmeLines.Contains(End(LinksComment)))
            {
                // In the links section, extract the name of each reference-style Markdown link.
                // For example, grabs 'win-x86-badge-2.1.X' from
                // [win-x86-badge-2.1.X]: https://example.org/foo
                string[] links = readmeLines
                    .SkipWhile(line => line != Begin(LinksComment))
                    .Skip(1)
                    .TakeWhile(line => line != End(LinksComment))
                    .Where(line => line.StartsWith("[") && line.Contains("]:"))
                    .Select(line => line.Substring(
                        1,
                        line.IndexOf("]:", StringComparison.Ordinal) - 1))
                    .ToArray();

                string[] rows = Platforms.Select(p => CreateRow(p, links)).ToArray();

                // Final table to write to the file, with a newline before and after.
                string[] table = new[]
                {
                    "",
                    $"| Platform |{string.Concat(Branches.Select(p => $" {p.ItemSpec} |"))}",
                    $"| --- | {string.Concat(Enumerable.Repeat(" :---: |", Branches.Length))}"
                }.Concat(rows).Concat(new[]
                {
                    ""
                }).ToArray();

                if (readmeLines.Contains(Begin(TableComment)) &&
                    readmeLines.Contains(End(TableComment)))
                {
                    string[] beforeTable = readmeLines
                        .TakeWhile(line => line != Begin(TableComment))
                        .Concat(new[] { Begin(TableComment) })
                        .ToArray();

                    string[] afterTable = readmeLines
                        .Skip(beforeTable.Length)
                        .SkipWhile(line => line != End(TableComment))
                        .ToArray();

                    File.WriteAllLines(
                        ReadmeFile,
                        beforeTable.Concat(table).Concat(afterTable));
                }
                else
                {
                    Log.LogError($"Readme '{ReadmeFile}' has no '{TableComment}' section.");
                }
            }
            else
            {
                Log.LogError($"Readme '{ReadmeFile}' has no '{LinksComment}' section.");
            }

            return !Log.HasLoggedErrors;
        }

        /// <summary>Creates one table row: bold platform name, then one cell per branch.</summary>
        private string CreateRow(ITaskItem platform, string[] links)
        {
            string parenthetical = platform.GetMetadata("Parenthetical");

            string cells = string.Concat(
                Branches.Select(branch => $" {CreateCell(platform, branch, links)} |"));

            return $"| **{platform.ItemSpec}**{parenthetical} |{cells}";
        }

        /// <summary>
        /// Creates one table cell for a platform/branch pair by looking up link names of the
        /// form '{platformAbbr}-{type}-{branchAbbr}' in the available links. Returns "N/A" when
        /// no applicable link exists.
        /// </summary>
        private string CreateCell(ITaskItem platform, ITaskItem branch, string[] links)
        {
            string branchAbbr = branch.GetMetadata("Abbr");
            if (string.IsNullOrEmpty(branchAbbr))
            {
                Log.LogError($"Branch '{branch.ItemSpec}' has no Abbr metadata.");
            }

            string platformAbbr = platform.GetMetadata("Abbr");
            if (string.IsNullOrEmpty(platformAbbr))
            {
                Log.LogError($"Platform '{platform.ItemSpec}' has no Abbr metadata.");
            }

            var sb = new StringBuilder();

            string Link(string type) => $"{platformAbbr}-{type}-{branchAbbr}";

            void AddLink(string name, string type)
            {
                string link = Link(type);
                string checksum = Link($"{type}-checksum");

                if (links.Contains(link))
                {
                    // '<br>' separates multiple links stacked within a single table cell.
                    sb.Append("<br>");
                    sb.Append($"[{name}][{link}]");
                    if (links.Contains(checksum))
                    {
                        sb.Append($" ([Checksum][{checksum}])");
                    }
                }
            }

            string badge = Link("badge");
            string version = Link("version");

            if (links.Contains(badge) && links.Contains(version))
            {
                sb.Append($"[![][{badge}]][{version}]");
            }

            // Look for various types of links. The first parameter is the name of the link as it
            // appears in the table cell. The second parameter is how this type of link is
            // abbreviated in the link section. A generic checksum link is added for any of these
            // that also have a '-checksum' link.

            AddLink("Installer", "installer");

            AddLink("Runtime-Deps", "runtime-deps");
            AddLink("Host", "host");
            AddLink("Host FX Resolver", "hostfxr");
            AddLink("Shared Framework", "sharedfx");

            AddLink("zip", "zip");
            AddLink("tar.gz", "targz");

            AddLink("NetHost (zip)", "nethost-zip");
            AddLink("NetHost (tar.gz)", "nethost-targz");

            AddLink("Symbols (zip)", "symbols-zip");
            AddLink("Symbols (tar.gz)", "symbols-targz");

            if (sb.Length == 0)
            {
                sb.Append("N/A");
            }

            return sb.ToString();
        }
    }
}
diff --git a/tools-local/tasks/local.tasks.csproj b/tools-local/tasks/local.tasks.csproj
new file mode 100644
index 000000000..6c4f5cd9a
--- /dev/null
+++ b/tools-local/tasks/local.tasks.csproj
@@ -0,0 +1,41 @@
+
+
+
+ Library
+ netstandard2.0
+ $(TargetFrameworks);net46
+ {360F25FA-3CD9-4338-B961-A4F3122B88B2}
+ $(LocalBuildToolsDir)
+ false
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tools-local/tasks/local.tasks.sln b/tools-local/tasks/local.tasks.sln
new file mode 100644
index 000000000..2d74b097f
--- /dev/null
+++ b/tools-local/tasks/local.tasks.sln
@@ -0,0 +1,22 @@
+
+Microsoft Visual Studio Solution File, Format Version 12.00
+# Visual Studio 15
+VisualStudioVersion = 15.0.28307.645
+MinimumVisualStudioVersion = 10.0.40219.1
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "local.tasks", "local.tasks.csproj", "{360F25FA-3CD9-4338-B961-A4F3122B88B2}"
+EndProject
+Global
+ GlobalSection(SolutionConfigurationPlatforms) = preSolution
+ Debug|Any CPU = Debug|Any CPU
+ EndGlobalSection
+ GlobalSection(ProjectConfigurationPlatforms) = postSolution
+ {360F25FA-3CD9-4338-B961-A4F3122B88B2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {360F25FA-3CD9-4338-B961-A4F3122B88B2}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ EndGlobalSection
+ GlobalSection(SolutionProperties) = preSolution
+ HideSolutionNode = FALSE
+ EndGlobalSection
+ GlobalSection(ExtensibilityGlobals) = postSolution
+ SolutionGuid = {5562AE3C-B1B2-47F7-A661-3A5D47764C0E}
+ EndGlobalSection
+EndGlobal