+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ private void Init(Database database, System.String schema, System.String name, System.String baseServer, System.String baseDatabase, System.String baseSchema, System.String baseObject)
+ {
+ this.Name = name;
+ this.Parent = database;
+ if( null != schema )
+ {
+ this.ChangeSchema(schema, false);
+ }
+ this.BaseServer = baseServer;
+ this.BaseDatabase = baseDatabase;
+ this.BaseSchema = baseSchema;
+ this.BaseObject = baseObject;
+ }
+
+ public Synonym(Database database, System.String name, System.String schema, System.String baseServer, System.String baseDatabase, System.String baseSchema, System.String baseObject)
+ {
+ Init(database, schema, name, baseServer, baseDatabase, baseSchema, baseObject);
+ }
+
+ public Synonym(Database database, System.String name, System.String baseServer, System.String baseDatabase, System.String baseSchema, System.String baseObject)
+ {
+ Init(database, null, name, baseServer, baseDatabase, baseSchema, baseObject);
+ }
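+
+ // Illustrative usage only (database and object names are hypothetical): a synonym is defined for a
+ // base object and then persisted with the standard SMO Create() call:
+ //   var syn = new Synonym(db, "MySynonym", "dbo", null, "AdventureWorks", "Person", "Address");
+ //   syn.Create();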
+
+
+
+
+
+ public MailProfile(SqlMail parent, string name, string description)
+ {
+ ValidateName(name);
+ this.key = new SimpleObjectKey(name);
+ this.Parent = parent;
+
+ Properties.Get("Description").Value = description;
+ }
+
+
+
+
+
+ public MailAccount(SqlMail parent, string name, string description)
+ {
+ ValidateName(name);
+ this.key = new SimpleObjectKey(name);
+ this.Parent = parent;
+
+ Properties.Get("Description").Value = description;
+ }
+
+ public MailAccount(SqlMail parent, string name, string description, string displayName, string emailAddress)
+ {
+ ValidateName(name);
+ this.key = new SimpleObjectKey(name);
+ this.Parent = parent;
+
+ Properties.Get("Description").Value = description;
+ Properties.Get("DisplayName").Value = displayName;
+ Properties.Get("EmailAddress").Value = emailAddress;
+ }
+
+
+
+
+
+
+
+
+ public MessageTypeMapping(ServiceContract servicecontract, System.String messageName, MessageSource messageSource):
+ base()
+ {
+ ValidateName(messageName);
+ this.key = new SimpleObjectKey(messageName);
+ this.Parent = servicecontract;
+
+ this.MessageSource = messageSource;
+ }
+
+
+
+
+
+
+ public KeyEncryption ():
+ base()
+ {
+ }
+
+
+
+
+
+
+
+
+ private void Init(JobServer jobServer, System.String proxyName, System.String credentialName, System.Boolean enabled, System.String description)
+ {
+ ValidateName(proxyName);
+ this.key = new SimpleObjectKey(proxyName);
+ this.Parent = jobServer;
+
+ m_comparer = jobServer.Parent.Databases["msdb"].StringComparer;
+
+ this.CredentialName = credentialName;
+ this.IsEnabled = enabled;
+ this.Description = description;
+ }
+
+ public ProxyAccount(JobServer jobServer, System.String proxyName, System.String credentialName, System.Boolean enabled, System.String description) :
+ base()
+ {
+ Init(jobServer, proxyName, credentialName, enabled, description);
+ }
+
+ public ProxyAccount(JobServer jobServer, System.String proxyName, System.String credentialName, System.Boolean enabled):
+ base()
+ {
+ Init(jobServer, proxyName, credentialName, enabled, " ");
+ }
+
+ public ProxyAccount(JobServer jobServer, System.String proxyName, System.String credentialName):
+ base()
+ {
+ Init(jobServer, proxyName, credentialName, true, " ");
+ }
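+
+ // Illustrative usage only (proxy and credential names are hypothetical): a proxy account is created
+ // against an existing credential and then persisted with the standard SMO Create() call:
+ //   var proxy = new ProxyAccount(server.JobServer, "TestProxy", "TestCredential", true, "proxy for agent jobs");
+ //   proxy.Create();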
+
+
+
+
+
+
+ public class Events
+ {
+ internal Events( parent)
+ {
+ this.parent = parent;
+ }
+
+ public GetEventSelection()
+ {
+ InitializeEvents();
+ return () this.serverEventsWorker.GetEventSelection();
+ }
+
+ public void SubscribeToEvents( events)
+ {
+ InitializeEvents();
+ this.serverEventsWorker.SubscribeToEvents(events, null);
+ }
+
+ public void SubscribeToEvents( events, ServerEventHandler eventHandler)
+ {
+ InitializeEvents();
+ this.serverEventsWorker.SubscribeToEvents(events, eventHandler);
+ }
+
+ public void UnsubscribeFromEvents( events)
+ {
+ if (null != this.serverEventsWorker)
+ {
+ this.serverEventsWorker.UnsubscribeFromEvents(events);
+ }
+ }
+
+ public void UnsubscribeAllEvents()
+ {
+ if (null != this.serverEventsWorker)
+ {
+ this.serverEventsWorker.Dispose();
+ this.serverEventsWorker = null;
+ }
+ }
+
+ public void StartEvents()
+ {
+ if (null != this.serverEventsWorker)
+ {
+ this.serverEventsWorker.StartEvents();
+ }
+ }
+
+ public void StopEvents()
+ {
+ if (null != this.serverEventsWorker)
+ {
+ this.serverEventsWorker.StopEvents();
+ }
+ }
+
+ public event ServerEventHandler ServerEvent
+ {
+ add
+ {
+ InitializeEvents();
+ this.serverEventsWorker.AddDefaultEventHandler(value);
+ }
+
+ remove
+ {
+ if (null != this.serverEventsWorker)
+ {
+ this.serverEventsWorker.RemoveDefaultEventHandler(value);
+ }
+ }
+ }
+
+ private void InitializeEvents()
+ {
+ if (null == serverEventsWorker)
+ {
+ serverEventsWorker = new (parent);
+
+ }
+ }
+
+ private serverEventsWorker;
+ private parent;
+ }
+
+
+
+
+
+ private Events events;
+ public Events Events
+ {
+ [System.Runtime.CompilerServices.MethodImpl(System.Runtime.CompilerServices.MethodImplOptions.NoInlining)]
+ get
+ {
+ if(Microsoft.SqlServer.Server.SqlContext.IsAvailable)
+ throw new SmoException(ExceptionTemplates.SmoSQLCLRUnAvailable);
+ if (null == this.events)
+ {
+ this.events = new Events(this);
+ }
+ return this.events;
+ }
+ }
+
+
+
+
+
+
+ protected Parameter() :
+ base()
+ {
+ }
+
+
+
+
+
+ public StoredProcedureParameter(StoredProcedure storedProcedure, System.String name, DataType dataType)
+ {
+ ValidateName(name);
+ this.key = new SimpleObjectKey(name);
+ this.Parent = storedProcedure;
+ this.DataType = dataType;
+ }
+
+
+
+
+
+ public UserDefinedFunctionParameter(UserDefinedFunction userDefinedFunction, System.String name, DataType dataType)
+ {
+ ValidateName(name);
+ this.key = new SimpleObjectKey(name);
+ this.Parent = userDefinedFunction;
+ this.DataType = dataType;
+ }
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/src/Directory.Build.props b/src/Directory.Build.props
new file mode 100644
index 00000000..76f0a7f2
--- /dev/null
+++ b/src/Directory.Build.props
@@ -0,0 +1,103 @@
+
+
+
+
+ target\distrib
+
+
+ $(EnlistmentRoot)\$(TargetFolder)\gac
+
+
+ false
+
+
+ $(MSBuildAllProjects);$(MSBuildThisFileFullPath)
+
+ Debug
+ $(EnlistmentRoot)\$(TargetFolder)
+ $(BaseOutputPath)\$(Configuration)
+
+ $(EnlistmentRoot)\obj\$([MSBuild]::MakeRelative('$(EnlistmentRoot)\', $(MSBuildProjectDirectory)))
+
+ $(EnlistmentRoot)\SmoBuild\
+ netstandard
+ netstandard
+ netcore
+ netfx
+ CSharp.NetFx.props
+ CSharp.NetCore.props
+ CSharp.NetStandard.props
+ $(NoWarn);NU5125;MSB3026
+ true
+ $(EnlistmentRoot)\packages
+ false
+
+ true
+ $(MicrosoftDataBuild)
+ true
+ $(DefineConstants);MICROSOFTDATA
+
+
+
+ true
+
+ $(DefineConstants);APTCA_ENABLED
+
+
+
+ pdbonly
+ true
+
+
+ full
+ true
+
+
+ $(EnlistmentRoot)\src\Microsoft\Data\Tools.Sql.BatchParser
+ $(EnlistmentRoot)\src\Microsoft\SqlServer\Management\Smo.Broker
+ $(EnlistmentRoot)\src\Codegen
+ $(EnlistmentRoot)\src\Microsoft\SqlServer\Management\ConnectionInfo
+ $(EnlistmentRoot)\src\Microsoft\SqlServer\Management\Dmf
+ $(EnlistmentRoot)\src\Microsoft\SqlServer\Management\Dmf.Common
+ $(EnlistmentRoot)\src\Microsoft\SqlServer\Management\HadrData
+ $(EnlistmentRoot)\src\Microsoft\SqlServer\Management\HadrModel
+ $(EnlistmentRoot)\src\Microsoft\SqlServer\Management\Smo.Notebook
+ $(EnlistmentRoot)\src\Microsoft\SqlServer\Management\PolicyEnum
+ $(EnlistmentRoot)\src\Microsoft\SqlServer\Management\RegisteredServers
+ $(EnlistmentRoot)\src\Microsoft\SqlServer\Management\Smo.RegSvrEnum
+ $(EnlistmentRoot)\src\Microsoft\SqlServer\Management\Smo
+ $(EnlistmentRoot)\src\Microsoft\SqlServer\Management\Sdk\Sfc
+ $(EnlistmentRoot)\src\Microsoft\SqlServer\Management\Smo.Extended
+ $(EnlistmentRoot)\src\Microsoft\SqlServer\Management\SmoMetadataProvider
+ $(EnlistmentRoot)\src\Microsoft\SqlServer\Management\SqlAssessment
+ $(EnlistmentRoot)\src\Microsoft\SqlServer\SqlClrProvider
+ $(EnlistmentRoot)\src\Microsoft\SqlServer\Management\SqlEnum
+ $(EnlistmentRoot)\src\Microsoft\SqlServer\Management\SqlScriptPublish
+ $(EnlistmentRoot)\src\FunctionalTest\Framework
+ $(EnlistmentRoot)\src\Microsoft\SqlServer\Management\Smo.WmiEnum
+ $(EnlistmentRoot)\src\Microsoft\SqlServer\Management\XEvent
+ $(EnlistmentRoot)\src\Microsoft\SqlServer\Management\XEventDbScoped
+ $(EnlistmentRoot)\src\Microsoft\SqlServer\Management\XEventDbScopedEnum
+ $(EnlistmentRoot)\src\Microsoft\SqlServer\Management\XEventEnum
+
+
+
+ 160.22504.0
+ Microsoft.Data.SqlClient
+ 3.0.0
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/src/Directory.Build.targets b/src/Directory.Build.targets
new file mode 100644
index 00000000..b2fd366a
--- /dev/null
+++ b/src/Directory.Build.targets
@@ -0,0 +1,94 @@
+
+
+
+
+ $(MSBuildAllProjects);$(MSBuildThisFileFullPath)
+ AssemblyVersionInfo.cs
+
+
+
+
+
+
+ $(rootnamespace)
+ %(filename)
+ %(filename)
+ %(filename)
+ %(filename)
+ %(namespace)
+ %(resourcename)
+
+
+
+
+
+
+
+
+
+
+
+ $(RootNamespace)
+ $(AssemblyName)
+ $(DefineConstants);EXCLUDE_ASSEMBLY_VERSION_INFO
+ $(DefineConstants);CLSCOMPLIANT
+ [assembly: System.CLSCompliant(true)]
+ $(AssemblyFileVersion)$(ClsCompliant)
+
+$(ClsCompliance)
+#if EXCLUDE_ASSEMBLY_VERSION_INFO
+#else
+namespace $(Namespace)
+{
+internal static class AssemblyVersionInfo
+{
+public const string VersionString = "$(AssemblyVersion)"%3B
+public const int MajorVersion = $(AssemblyMajorVersion)%3B
+}
+}
+#endif
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ <_Parameter1>%(AssemblyMetadata.Identity)
+ <_Parameter2>%(AssemblyMetadata.Value)
+
+
+ <_Parameter1>%(InternalsVisibleTo.Identity)
+ <_Parameter1 Condition="$(SignAssembly) == 'true'">%(InternalsVisibleTo.Identity), PublicKey=%(InternalsVisibleTo.Key)
+
+
+
+
+
+
+ $([System.Text.RegularExpressions.Regex]::Replace('$(DefineConstants)', '\bTRACE\;?\b', ';'))
+
+
+
\ No newline at end of file
diff --git a/src/FunctionalTest/Directory.Build.props b/src/FunctionalTest/Directory.Build.props
new file mode 100644
index 00000000..2989435b
--- /dev/null
+++ b/src/FunctionalTest/Directory.Build.props
@@ -0,0 +1,21 @@
+
+
+
+ $(MSBuildAllProjects);$(MSBuildThisFileFullPath)
+ false
+
+
+
+ $(NetfxVersion);netcoreapp3.1
+
+
+ false
+
+ true
+
+
+
+ $(EnlistmentRoot)\src\FunctionalTest\Framework
+ $(EnlistmentRoot)\src\FunctionalTest\Smo
+
+
\ No newline at end of file
diff --git a/src/FunctionalTest/Dockerfiles/150Linux/dockerfile b/src/FunctionalTest/Dockerfiles/150Linux/dockerfile
new file mode 100644
index 00000000..b426ec8a
--- /dev/null
+++ b/src/FunctionalTest/Dockerfiles/150Linux/dockerfile
@@ -0,0 +1,20 @@
+FROM ubuntu:18.04
+
+# Install prerequisites including repo config for SQL server and PolyBase.
+RUN export DEBIAN_FRONTEND=noninteractive && \
+ apt-get update && \
+ apt-get install -y gnupg && \
+ apt-get install -yq apt-transport-https curl && \
+ # Get official Microsoft repository configuration
+ curl https://packages.microsoft.com/keys/microsoft.asc | apt-key add - && \
+ curl https://packages.microsoft.com/config/ubuntu/18.04/mssql-server-2019.list | tee /etc/apt/sources.list.d/mssql-server-2019.list && \
+ curl https://packages.microsoft.com/config/ubuntu/18.04/prod.list | tee /etc/apt/sources.list.d/msprod.list && \
+ apt-get update && \
+ # Install PolyBase will also install SQL Server via dependency mechanism.
+ apt-get install -y mssql-server-polybase && \
+ # Cleanup the Dockerfile
+ apt-get clean && \
+ rm -rf /var/lib/apt/lists
+
+# Run SQL Server process
+CMD /opt/mssql/bin/sqlservr
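+
+# Illustrative build/run commands only (image tag and SA password are placeholders):
+#   docker build -t sql2019-polybase -f dockerfile .
+#   docker run -d -p 1433:1433 -e ACCEPT_EULA=Y -e MSSQL_SA_PASSWORD='<YourStrong!Passw0rd>' sql2019-polybase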
diff --git a/src/FunctionalTest/Framework/Helpers/AzureKeyVaultHelper.cs b/src/FunctionalTest/Framework/Helpers/AzureKeyVaultHelper.cs
new file mode 100644
index 00000000..d2c398a1
--- /dev/null
+++ b/src/FunctionalTest/Framework/Helpers/AzureKeyVaultHelper.cs
@@ -0,0 +1,148 @@
+// Copyright (c) Microsoft.
+// Licensed under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Security;
+using System.Security.Cryptography.X509Certificates;
+using Azure.Identity;
+using Azure.Security.KeyVault.Secrets;
+namespace Microsoft.SqlServer.Test.Manageability.Utils.Helpers
+{
+ /// <summary>
+ /// Retrieves a decrypted secret from Azure Key Vault or environment using certificate auth or client secret or managed identity
+ /// </summary>
+ public class AzureKeyVaultHelper
+ {
+ /// <summary>
+ /// The set of certificate thumbprints associated with the service principal.
+ /// If this collection is non-empty, AzureApplicationId and AzureTenantId must also be set to valid values.
+ /// Set these properties to use certificate-based authentication without relying on environment variables to specify the certificate.
+ /// </summary>
+ public IEnumerable<string> CertificateThumbprints { get; set; }
+ /// <summary>
+ /// The Azure application id associated with the service principal
+ /// </summary>
+ public string AzureApplicationId { get; set; }
+ /// <summary>
+ /// The Azure tenant id associated with the service principal
+ /// </summary>
+ public string AzureTenantId { get; set; }
+ /// <summary>
+ /// The name of the Azure key vault where test secrets are stored.
+ /// </summary>
+ public string KeyVaultName { get; private set; }
+
+ private static readonly IDictionary<string, SecureString> secretCache = new Dictionary<string, SecureString>();
+ private static readonly object syncObj = new object();
+ public static readonly string SSMS_TEST_SECRET_PREFIX = "SQLA-SSMS-Test-";
+
+ /// <summary>
+ /// Constructs a new AzureKeyVaultHelper that relies on an instance of Azure.Identity.DefaultAzureCredential to access the given vault.
+ /// </summary>
+ /// <param name="keyVaultName"></param>
+ public AzureKeyVaultHelper(string keyVaultName)
+ {
+
+ KeyVaultName = keyVaultName;
+ CertificateThumbprints = Enumerable.Empty<string>();
+ }
+ /// <summary>
+ /// Converts the secretName to an AKV resource URL and retrieves its decrypted value.
+ /// If the value exists as an environment variable, the environment variable value is used.
+ /// Note that / characters in the secretName are replaced by - and _ characters are removed before the lookup.
+ /// </summary>
+ /// <param name="secretName"></param>
+ /// <returns></returns>
+ public string GetDecryptedSecret(string secretName)
+ {
+ var secret = string.Empty;
+ var lookupName = secretName.Replace('/', '-').Replace("_", "");
+ lock (syncObj)
+ {
+ if (secretCache.ContainsKey(secretName))
+ {
+ secret = secretCache[secretName].SecureStringToString();
+ }
+ }
+ if (string.IsNullOrEmpty(secret))
+ {
+ TraceHelper.TraceInformation("Looking for secret {0} using name {1}. Starting with environment variables.", secretName, lookupName);
+ secret = Environment.GetEnvironmentVariable(lookupName);
+ }
+ if (string.IsNullOrEmpty(secret))
+ {
+ Azure.Core.TokenCredential credential = new DefaultAzureCredential();
+ foreach (var thumbprint in CertificateThumbprints ?? Enumerable.Empty<string>())
+ {
+ var certificate = FindCertificate(thumbprint);
+ if (certificate != null)
+ {
+ credential = new ClientCertificateCredential(AzureTenantId, AzureApplicationId, certificate);
+ // Stop at the first thumbprint that resolves to an installed certificate
+ break;
+ }
+ }
+
+ var secretIdentifier = $"https://{KeyVaultName}.vault.azure.net/secrets/{lookupName}";
+ TraceHelper.TraceInformation("Secret {0} not set as environment variable. Looking in AKV for {1}.", secretName, secretIdentifier);
+ try
+ {
+ var secretClient = new SecretClient(new Uri($"https://{KeyVaultName}.vault.azure.net"), credential);
+ secret = secretClient.GetSecret(lookupName).Value.Value;
+ }
+ catch (Exception e)
+ {
+ Console.WriteLine(@"Got Exception fetching secret. Type:{0}, Inner:{1}, Outer:{2}", e.GetType(), e.InnerException, e);
+ throw;
+ }
+ // Note we aren't bothering to cache secrets we found from GetEnvironmentVariable since that API is already fast
+ lock (syncObj)
+ {
+ secretCache[secretName] = secret.StringToSecureString();
+ }
+ }
+ return secret;
+ }
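+
+ // Illustrative usage only (vault, tenant, application and secret names are hypothetical):
+ //   var akv = new AzureKeyVaultHelper("MyTestVault") { AzureTenantId = "<tenant>", AzureApplicationId = "<appId>" };
+ //   var sqlPassword = akv.GetDecryptedSecret(AzureKeyVaultHelper.SSMS_TEST_SECRET_PREFIX + "sa-password");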
+
+ private static X509Certificate2 FindCertificate(string thumbprint)
+ {
+ X509Certificate2 certificate = null;
+ if (System.Runtime.InteropServices.RuntimeInformation.IsOSPlatform(System.Runtime.InteropServices.OSPlatform.Windows))
+ {
+ var store = new X509Store(StoreName.My, StoreLocation.LocalMachine);
+ try
+ {
+ store.Open(OpenFlags.ReadOnly);
+ X509Certificate2Collection certificateCollection = store.Certificates.Find(X509FindType.FindByThumbprint, thumbprint, validOnly: false);
+ if (certificateCollection.Count == 0)
+ {
+ TraceHelper.TraceInformation("Couldn't find Smo cert {0} in local machine. Looking in current user", thumbprint);
+ var userStore = new X509Store(StoreName.My, StoreLocation.CurrentUser);
+ userStore.Open(OpenFlags.ReadOnly);
+ try
+ {
+ certificateCollection = userStore.Certificates.Find(X509FindType.FindByThumbprint,
+ thumbprint, validOnly: false);
+ }
+ finally
+ {
+ userStore.Close();
+ }
+ }
+ if (certificateCollection.Count != 0)
+ {
+ TraceHelper.TraceInformation("Found cert {0}", thumbprint);
+ certificate = certificateCollection[0];
+ }
+ }
+ finally
+ {
+ store.Close();
+ }
+ }
+ return certificate;
+ }
+
+ }
+}
\ No newline at end of file
diff --git a/src/FunctionalTest/Framework/Helpers/ConnectionHelpers.cs b/src/FunctionalTest/Framework/Helpers/ConnectionHelpers.cs
new file mode 100644
index 00000000..899e571f
--- /dev/null
+++ b/src/FunctionalTest/Framework/Helpers/ConnectionHelpers.cs
@@ -0,0 +1,242 @@
+// Copyright (c) Microsoft.
+// Licensed under the MIT license.
+
+using System;
+using System.Collections.Generic;
+#if MICROSOFTDATA
+using Microsoft.Data.SqlClient;
+#else
+using System.Data.SqlClient;
+#endif
+using System.Linq;
+using System.Reflection;
+using SMO = Microsoft.SqlServer.Management.Smo;
+using Microsoft.SqlServer.Management.Common;
+using Microsoft.SqlServer.Test.Manageability.Utils.Helpers;
+using Microsoft.SqlServer.Test.Manageability.Utils.TestFramework;
+using System.Xml.Linq;
+using System.Xml;
+using System.IO;
+using System.Diagnostics;
+using System.Xml.XPath;
+
+namespace Microsoft.SqlServer.Test.Manageability.Utils
+{
+ /// <summary>
+ /// Helper methods and vars for getting connections used for tests
+ /// </summary>
+ public static class ConnectionHelpers
+ {
+ private class ConnectionData
+ {
+ public AzureKeyVaultHelper AzureKeyVaultHelper;
+ public readonly IDictionary<string, Tuple<SMO.Server, TestServerDescriptor>> ServerDescriptors = new Dictionary<string, Tuple<SMO.Server, TestServerDescriptor>>();
+ }
+ // ThreadStatic enables us to run tests in parallel
+ [ThreadStatic]
+ private static ConnectionData _serverConnections;
+
+ private static ConnectionData ServerConnections =>
+ _serverConnections ?? (_serverConnections = LoadConnStrings());
+
+ /// <summary>
+ /// Reads the connection strings defined in the ToolsConnectionInfo.xml file
+ /// and puts them into a dictionary for easy access
+ /// </summary>
+ /// <returns></returns>
+ private static ConnectionData LoadConnStrings()
+ {
+ var serverConnections = new ConnectionData();
+ var connectionDocument = LoadConnectionDocument();
+ var akvElement = connectionDocument.XPathSelectElement(@"//AkvAccess");
+ if (akvElement != null && akvElement.Element("VaultName") != null)
+ {
+ serverConnections.AzureKeyVaultHelper = new AzureKeyVaultHelper(akvElement.Element("VaultName").Value)
+ {
+ AzureApplicationId = akvElement.Element("AzureApplicationId")?.Value,
+ AzureTenantId = akvElement.Element("AzureTenantId")?.Value,
+ CertificateThumbprints = akvElement.Elements("Thumbprint").Select(s => s.Value).ToArray()
+ };
+ }
+ foreach (var descriptor in TestServerDescriptor.GetServerDescriptors(connectionDocument, serverConnections.AzureKeyVaultHelper))
+ {
+ //SqlTestTargetServersFilter env variable was empty/didn't exist or it contained this server, add to our overall list
+ var svr = new SMO.Server(new ServerConnection(new SqlConnection(descriptor.ConnectionString)));
+ TraceHelper.TraceInformation("Loaded connection string '{0}' = '{1}'{2}",
+ descriptor.Name,
+ new SqlConnectionStringBuilder(descriptor.ConnectionString).DataSource,
+ descriptor.BackupConnnectionStrings.Any() ?
+ "Backups = " + descriptor.BackupConnnectionStrings :
+ string.Empty);
+ serverConnections.ServerDescriptors.Add(descriptor.Name, new Tuple<SMO.Server, TestServerDescriptor>(svr, descriptor));
+ }
+
+ return serverConnections;
+ }
+
+ private static XDocument LoadConnectionDocument()
+ {
+ //Load up the connection string values from the ToolsConnectionInfo.xml file
+ using (Stream connStringsStream = GetConnectionXml())
+ {
+ return XDocument.Load(XmlReader.Create(connStringsStream));
+ }
+ }
+
+ private static Stream GetConnectionXml()
+ {
+ var privateConfigPath =
+ Environment.GetEnvironmentVariable("TestPath", EnvironmentVariableTarget.Process) ??
+ Environment.GetEnvironmentVariable("TestPath", EnvironmentVariableTarget.User) ??
+ Environment.GetEnvironmentVariable("TestPath", EnvironmentVariableTarget.Machine) ??
+ Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location) ?? Environment.CurrentDirectory;
+ Trace.TraceInformation($"Using '{privateConfigPath}' to look for ToolsConnectionInfo.xml");
+ var privateXmlPath = Path.Combine(privateConfigPath, "ToolsConnectionInfo.xml");
+ if (File.Exists(privateXmlPath))
+ {
+ TraceHelper.TraceInformation("Using private connection data from {0}", privateXmlPath);
+ return File.OpenRead(privateXmlPath);
+ }
+ throw new InvalidOperationException("No ToolsConnectionInfo.xml file found");
+ }
+
+ /// <summary>
+ /// Returns a list of SqlConnectionStringBuilders for servers from ConnectionInfo.xml
+ /// that pass all the SqlSupportedDimensionAttribute criteria specified on the Method
+ /// passed in.
+ /// </summary>
+ /// <param name="mi"></param>
+ /// <param name="filter">An optional filter that accepts the server friendly name and returns true if it should be included in the search</param>
+ /// <returns></returns>
+ public static IDictionary<string, IEnumerable<SqlConnectionStringBuilder>> GetServerConnections(MethodInfo mi, Func<string, bool> filter = null)
+ {
+ var requiredFeatureAttributes =
+ mi.GetCustomAttributes(true)
+ .Concat(mi.DeclaringType.GetCustomAttributes()).ToArray();
+
+ var requiredFeatures = requiredFeatureAttributes.SelectMany(feature => feature.RequiredFeatures).Distinct().ToArray();
+
+ var serverConnections = new Dictionary<string, IEnumerable<SqlConnectionStringBuilder>>();
+ //We need to check each of the defined servers to see if they're flagged
+ foreach (KeyValuePair<string, Tuple<SMO.Server, TestServerDescriptor>> serverConnectionPair in ServerConnections.ServerDescriptors.Where(kvp => filter?.Invoke(kvp.Key) ?? true))
+ {
+ // Make sure the required features for the test are enabled on the server
+ if (requiredFeatures.Any(feature => !serverConnectionPair.Value.Item2.EnabledFeatures.Contains(feature)))
+ {
+ continue;
+ }
+
+ // Make sure the test requires at least one of the features the server is reserved for
+ if (serverConnectionPair.Value.Item2.ReservedFor.Any() && !serverConnectionPair.Value.Item2.ReservedFor.Intersect(requiredFeatures).Any())
+ {
+ continue;
+ }
+
+ //For SqlSupportedDimensionAttributes we consider the server supported if
+ //ANY of the attributes return IsSupported is true
+
+ //Note we look at attributes on both the method and the class the method is declared in
+ var supportedDimensions =
+ mi.GetCustomAttributes<SqlSupportedDimensionAttribute>(true)
+ .Concat(mi.DeclaringType.GetCustomAttributes<SqlSupportedDimensionAttribute>()).ToArray();
+ //If we don't have any SupportedDimensionAttributes we default to it being supported for all servers
+
+ var exceptions = new List<Exception>();
+
+ bool isSupported = supportedDimensions.Length == 0 ||
+ supportedDimensions.Any(a =>
+ {
+ try
+ {
+ return a.IsSupported(serverConnectionPair.Value.Item1, serverConnectionPair.Value.Item2,
+ serverConnectionPair.Key);
+ }
+ catch (Exception e)
+ {
+ // Something went wrong, continue on for now to see if any of the UnsupportedAttributes exclude
+ // this server from even being included (in which case we'll just ignore the error anyways). If
+ // we DON'T exclude the server though we'll rethrow the error further down so the test still fails
+ // since we can't tell if the server is actually supported.
+ exceptions.Add(e);
+ return true;
+ }
+ });
+ if (isSupported)
+ {
+ //For SqlUnsupportedDimensionAttributes we consider the server supported only
+ //if ALL of the unsupported attributes return IsSupported = true
+
+ //Note we look at attributes on both the method and the class the method is declared in
+ isSupported &= mi.GetCustomAttributes<SqlUnsupportedDimensionAttribute>(true)
+ .Concat(mi.DeclaringType.GetCustomAttributes<SqlUnsupportedDimensionAttribute>())
+ .Aggregate(true, (current, unsupportedDimensionAttribute) =>
+ {
+ try
+ {
+ return current & unsupportedDimensionAttribute.IsSupported(
+ serverConnectionPair.Value.Item1, serverConnectionPair.Value.Item2,
+ serverConnectionPair.Key);
+ }
+ catch (Exception e)
+ {
+ // Something went wrong, continue on for now to see if any of the other UnsupportedAttributes exclude
+ // this server from even being included (in which case we'll just ignore the error anyways). If
+ // we DON'T exclude the server though we'll rethrow the error further down so the test still fails
+ // since we can't tell if the server is actually supported.
+ exceptions.Add(e);
+ return current;
+ }
+ });
+ if (isSupported)
+ {
+ if (exceptions.Any())
+ {
+ // As far as we know we should have added this server, but exceptions occurred during processing of the
+ // Supported/Unsupported attributes so we're in an unknown state - rethrow those exceptions here since
+ // we want to err on the side of assuming that this server was supposed to be included but something went wrong
+ throw new AggregateException(
+ "Exceptions thrown when determining Supported/Unsupported status for server " + serverConnectionPair.Key, exceptions);
+ }
+ //Create a copy of the builder so clients can modify it as they wish without affecting other tests
+ serverConnections.Add(serverConnectionPair.Key,
+ serverConnectionPair.Value.Item2.AllConnectionStrings.Select(
+ connString => new SqlConnectionStringBuilder(connString)));
+ }
+ }
+ }
+
+ return serverConnections;
+ }
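+
+ // Illustrative usage only (test class and method names are hypothetical); reflection is one way
+ // to get the MethodInfo for the currently executing test:
+ //   var mi = typeof(MyTestClass).GetMethod(nameof(MyTestClass.MyTest));
+ //   foreach (var kvp in ConnectionHelpers.GetServerConnections(mi)) { /* kvp.Value holds the connection string builders */ }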
+
+ /// <summary>
+ /// Returns the default database edition to use for the given friendly name
+ /// </summary>
+ /// <param name="targetServerFriendlyName"></param>
+ /// <returns></returns>
+ public static DatabaseEngineEdition GetDefaultEdition(string targetServerFriendlyName)
+ {
+ return ServerConnections.ServerDescriptors.ContainsKey(targetServerFriendlyName)
+ ? ServerConnections.ServerDescriptors[targetServerFriendlyName].Item2.DatabaseEngineEdition
+ : DatabaseEngineEdition.Unknown;
+ }
+
+ /// <summary>
+ /// Returns connection strings for TestServerDescriptors that match the filter
+ /// </summary>
+ /// <param name="filter"></param>
+ /// <returns></returns>
+ public static IEnumerable<string> GetMatchingConnections(Func<TestServerDescriptor, bool> filter)
+ {
+ return ServerConnections.ServerDescriptors.Where(d => filter(d.Value.Item2)).Select(d => d.Value.Item2.ConnectionString);
+ }
+
+ /// <summary>
+ /// Returns an AzureKeyVaultHelper defined by the AkvAccess element of ToolsConnectionInfo.xml
+ /// </summary>
+ /// <returns></returns>
+ public static AzureKeyVaultHelper GetAzureKeyVaultHelper()
+ {
+ return ServerConnections.AzureKeyVaultHelper;
+ }
+ }
+}
diff --git a/src/FunctionalTest/Framework/Helpers/ConnectionMetrics.cs b/src/FunctionalTest/Framework/Helpers/ConnectionMetrics.cs
new file mode 100644
index 00000000..800e7d69
--- /dev/null
+++ b/src/FunctionalTest/Framework/Helpers/ConnectionMetrics.cs
@@ -0,0 +1,130 @@
+// Copyright (c) Microsoft.
+// Licensed under the MIT license.
+
+using System;
+#if MICROSOFTDATA
+using Microsoft.Data.SqlClient;
+#else
+using System.Data.SqlClient;
+#endif
+using System.Threading;
+using Microsoft.SqlServer.Management.Common;
+using System.Collections.Generic;
+
+namespace Microsoft.SqlServer.Test.Manageability.Utils
+{
+ /// <summary>
+ /// Provides a ServerConnection instrumented to collect the number of connections and volume of data sent to and from the server.
+ /// </summary>
+ public class ConnectionMetrics : IDisposable
+ {
+ /// <summary>
+ /// How many distinct connections have been opened on the proxy
+ /// </summary>
+ public int ConnectionCount;
+ /// <summary>
+ /// Total bytes of data read from the proxy by the client
+ /// </summary>
+ public long BytesRead;
+ /// <summary>
+ /// Total bytes of data sent to the proxy by the client
+ /// </summary>
+ public long BytesSent;
+ /// <summary>
+ /// How many queries have been executed on the SqlConnection
+ /// </summary>
+ public int QueryCount;
+ /// <summary>
+ /// The ServerConnection to use for instrumented tests
+ /// </summary>
+ public readonly ServerConnection ServerConnection;
+ /// <summary>
+ /// The proxy used for instrumentation. Tests can add their own event handlers to the proxy if desired.
+ /// Those event handlers should perform minimal processing.
+ /// </summary>
+ public readonly GenericSqlProxy Proxy;
+ /// <summary>
+ /// A list of (timestamp, bytesread) tuples corresponding to each BytesRead event on the proxy
+ /// </summary>
+ public readonly List<(DateTimeOffset Timestamp, long BytesRead)> BytesReadHistogram = new List<(DateTimeOffset Timestamp, long BytesRead)>();
+ private ConnectionMetrics(ServerConnection serverConnection, GenericSqlProxy proxy)
+ {
+ Proxy = proxy;
+ ServerConnection = serverConnection;
+ proxy.OnConnect += Proxy_OnConnect;
+ proxy.OnWriteHost += Proxy_OnWriteHost;
+ proxy.OnWriteClient += Proxy_OnWriteClient;
+ serverConnection.StatementExecuted += ServerConnection_StatementExecuted;
+ }
+
+ /// <summary>
+ /// Resets all the metrics to 0 and clears the histogram
+ /// </summary>
+ public void Reset()
+ {
+ ConnectionCount = 0;
+ BytesRead = BytesSent = 0;
+ QueryCount = 0;
+ BytesReadHistogram.Clear();
+ }
+
+ private void ServerConnection_StatementExecuted(object sender, StatementEventArgs e)
+ {
+ QueryCount++;
+ }
+
+ private void Proxy_OnWriteClient(object sender, StreamWriteEventArgs e)
+ {
+ BytesRead += e.BytesWritten;
+ BytesReadHistogram.Add((DateTimeOffset.UtcNow, e.BytesWritten));
+ }
+
+ private void Proxy_OnWriteHost(object sender, StreamWriteEventArgs e)
+ {
+ BytesSent += e.BytesWritten;
+ }
+
+ private void Proxy_OnConnect(object sender, ProxyConnectionEventArgs e)
+ {
+ ConnectionCount++;
+ }
+
+ public void Dispose()
+ {
+ Proxy.OnConnect -= Proxy_OnConnect;
+ Proxy.OnWriteHost -= Proxy_OnWriteHost;
+ Proxy.OnWriteClient -= Proxy_OnWriteClient;
+ ServerConnection.StatementExecuted -= ServerConnection_StatementExecuted;
+ ServerConnection.SqlConnectionObject.Dispose();
+ Proxy.Dispose();
+ }
+
+ /// <summary>
+ /// Creates a ConnectionMetrics object for measuring statements, data movement, and connections for the given connection string
+ /// </summary>
+ /// <param name="connectionString">The connection string for the connection to measure</param>
+ /// <param name="latencyPaddingMs">How many milliseconds of latency to inject in each client write. Default is 0</param>
+ /// <param name="proxyPort">The specific port the proxy should use. If 0, a port is allocated dynamically.</param>
+ /// <returns></returns>
+ public static ConnectionMetrics SetupMeasuredConnection(string connectionString, int latencyPaddingMs = 0, int proxyPort = 0)
+ {
+ var proxy = new GenericSqlProxy(connectionString);
+ if (latencyPaddingMs > 0)
+ {
+ proxy.OnWriteClient += (o,e) => DelayWrite(latencyPaddingMs, e);
+ }
+ // If running these tests in a container you may need to set a specific port
+ // and expose that port in the dockerfile
+ var sqlConnection = new SqlConnection(proxy.Initialize(proxyPort));
+ var serverConnection = new ServerConnection(sqlConnection);
+ return new ConnectionMetrics(serverConnection, proxy);
+ }
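+
+ // Illustrative usage only (connection string is a placeholder):
+ //   using (var metrics = ConnectionMetrics.SetupMeasuredConnection("Data Source=myServer;Integrated Security=true"))
+ //   {
+ //       metrics.ServerConnection.ExecuteNonQuery("select 1");
+ //       Console.WriteLine($"Queries: {metrics.QueryCount}, bytes read: {metrics.BytesRead}");
+ //   }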
+
+ static void DelayWrite(long delay, StreamWriteEventArgs args)
+ {
+ Thread.Sleep(Convert.ToInt32(delay));
+ }
+ }
+
+
+}
diff --git a/src/FunctionalTest/Framework/Helpers/DataExtensions.cs b/src/FunctionalTest/Framework/Helpers/DataExtensions.cs
new file mode 100644
index 00000000..c8234e4c
--- /dev/null
+++ b/src/FunctionalTest/Framework/Helpers/DataExtensions.cs
@@ -0,0 +1,48 @@
+// Copyright (c) Microsoft.
+// Licensed under the MIT license.
+
+namespace Microsoft.SqlServer.Test.Manageability.Utils.Helpers
+{
+#if NETSTANDARD2_0
+ public static class DataExtensions
+ {
+
+ //
+ // Summary:
+ // Provides strongly-typed access to each of the column values in the specified
+ // row. The System.Data.DataRowExtensions.Field``1(System.Data.DataRow,System.String)
+ // method also supports nullable types.
+ //
+ // Parameters:
+ // row:
+ // The input System.Data.DataRow, which acts as the this instance for the extension
+ // method.
+ //
+ // columnName:
+ // The name of the column to return the value of.
+ //
+ // Type parameters:
+ // T:
+ // A generic parameter that specifies the return type of the column.
+ //
+ // Returns:
+ // The value, of type T, of the System.Data.DataColumn specified by columnName.
+ //
+ // Exceptions:
+ // T:System.InvalidCastException:
+ // The value type of the underlying column could not be cast to the type specified
+ // by the generic parameter, T.
+ //
+ // T:System.IndexOutOfRangeException:
+ // The column specified by columnName does not occur in the System.Data.DataTable
+ // that the System.Data.DataRow is a part of.
+ //
+ // T:System.NullReferenceException:
+ // A null value was assigned to a non-nullable type.
+ public static T Field<T>(this System.Data.DataRow row, string columnName)
+ {
+ return (T)row[columnName];
+ }
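+
+ // Illustrative usage only (column name is hypothetical):
+ //   int id = dataRow.Field<int>("col_1");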
+ }
+#endif
+}
diff --git a/src/FunctionalTest/Framework/Helpers/DatabaseObjectHelpers.cs b/src/FunctionalTest/Framework/Helpers/DatabaseObjectHelpers.cs
new file mode 100644
index 00000000..27315f1e
--- /dev/null
+++ b/src/FunctionalTest/Framework/Helpers/DatabaseObjectHelpers.cs
@@ -0,0 +1,707 @@
+// Copyright (c) Microsoft.
+// Licensed under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.Collections.ObjectModel;
+#if MICROSOFTDATA
+using Microsoft.Data.SqlClient;
+#else
+using System.Data.SqlClient;
+#endif
+using System.IO;
+using System.Linq;
+using System.Text;
+using Microsoft.SqlServer.Management.Common;
+using Microsoft.SqlServer.Management.Smo;
+using Microsoft.SqlServer.Test.Manageability.Utils.Helpers;
+using SMO = Microsoft.SqlServer.Management.Smo;
+
+namespace Microsoft.SqlServer.Test.Manageability.Utils
+{
+ /// <summary>
+ /// Helper methods, constants, etc dealing with the SMO Database object
+ /// </summary>
+ public static class DatabaseObjectHelpers
+ {
+
+
+ /// <summary>
+ /// Restores a database from the specified backup file. It's the caller's responsibility to ensure the server
+ /// has read access for the file.
+ ///
+ /// The files in the backup will be moved to the default data/log locations for the server
+ /// </summary>
+ /// <param name="server">The server where the DB is going to be restored to</param>
+ /// <param name="dbBackupFile">The full path to the backup file</param>
+ /// <param name="dbName">The new name of the database</param>
+ /// <param name="withNoRecovery">If true, the database will be restored with the NORECOVERY option. False, by default</param>
+ /// <returns></returns>
+ public static Database RestoreDatabaseFromBackup(SMO.Server server, string dbBackupFile, string dbName, bool withNoRecovery = false)
+ {
+ // In theory, we should ask SQL if the file exists or not... because the path could be
+ // local to the server.
+ if (dbBackupFile.StartsWith(@"\\") && File.Exists(dbBackupFile))
+ {
+ throw new InvalidArgumentException($"DB Backup File '{dbBackupFile}' does not exist");
+ }
+
+ // Get the default location where we should place the restored data files
+ string dataFilePath = string.IsNullOrEmpty(server.Settings.DefaultFile) ? server.MasterDBPath : server.Settings.DefaultFile;
+ if (string.IsNullOrWhiteSpace(dataFilePath))
+ {
+ // We failed to get the path
+ throw new InvalidOperationException("Could not get database file path for restoring from backup");
+ }
+
+ // Get the default location where we should place the restored log files
+ string logFilePath = string.IsNullOrEmpty(server.Settings.DefaultLog) ? server.MasterDBLogPath : server.Settings.DefaultLog;
+ if (string.IsNullOrWhiteSpace(logFilePath))
+ {
+ // We failed to get the path
+ throw new InvalidOperationException("Could not get database log file path for restoring from backup");
+ }
+
+ StringBuilder sb = new StringBuilder();
+
+ sb.Append("RESTORE DATABASE " + dbName.SqlBracketQuoteString() + " FROM DISK = " + dbBackupFile.SqlSingleQuoteString() + " ");
+ BackupInfo backupInfo = GetBackupInfo(server, dbBackupFile);
+ //The files need to be moved to avoid collisions
+ int index = 0;
+ foreach (var file in backupInfo.Files)
+ {
+ //Type == L means it's a log file so put it in the log file location, all others go to data file location
+ string filePath = "L".Equals(file.FileType, StringComparison.OrdinalIgnoreCase)
+ ? logFilePath
+ : dataFilePath;
+ //Unique filename so new files don't collide either
+ string fileName = dbName + "_" + index + Path.GetExtension(file.PhysicalName);
+ string resultPath = Path.Combine(filePath, fileName);
+
+ string delimiter = index == 0 ? "WITH " : ", ";
+ sb.AppendLine(delimiter);
+ sb.Append("MOVE " + file.LogicalName.SqlSingleQuoteString() + " TO " + resultPath.SqlSingleQuoteString() + " ");
+ ++index;
+ }
+
+ if (withNoRecovery)
+ {
+ sb.Append(", NORECOVERY");
+ }
+
+ TraceHelper.TraceInformation(string.Format("Restoring database '{0}' from backup file '{1}'", dbName, dbBackupFile));
+ server.ConnectionContext.ExecuteNonQuery(sb.ToString());
+
+ server.Databases.Refresh();
+ return server.Databases[dbName];
+ }
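+
+ // For illustration, the script built above looks roughly like this (paths and names are hypothetical):
+ //   RESTORE DATABASE [restoredDb] FROM DISK = 'C:\backups\source.bak' WITH
+ //   MOVE 'source_Data' TO 'C:\data\restoredDb_0.mdf' ,
+ //   MOVE 'source_Log' TO 'C:\logs\restoredDb_1.ldf' , NORECOVERY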
+
+ private class BackupInfo
+ {
+ public BackupInfo()
+ {
+ Files = new List<BackupFileInfo>();
+ }
+
+ public List<BackupFileInfo> Files;
+ }
+
+ private class BackupFileInfo
+ {
+ public string LogicalName;
+ public string PhysicalName;
+ public string FileType;
+ }
+
+ private static BackupInfo GetBackupInfo(SMO.Server server, string dbBackupFile)
+ {
+ BackupInfo info = new BackupInfo();
+ string escapedDbBackupFile = SmoObjectHelpers.SqlEscapeSingleQuote(dbBackupFile);
+ string restoreFilelistCommand = $"RESTORE FILELISTONLY FROM DISK = '{escapedDbBackupFile}'";
+
+ using (SqlDataReader reader = server.ConnectionContext.ExecuteReader(restoreFilelistCommand))
+ {
+ while (reader != null && reader.Read())
+ {
+ BackupFileInfo fileInfo = new BackupFileInfo();
+ fileInfo.LogicalName = reader["LogicalName"] == null ? null : reader["LogicalName"].ToString();
+ fileInfo.PhysicalName = reader["PhysicalName"] == null ? null : reader["PhysicalName"].ToString();
+ fileInfo.FileType = reader["Type"] == null ? null : reader["Type"].ToString();
+ info.Files.Add(fileInfo);
+ }
+ }
+
+ return info;
+ }
+
+ /// <summary>
+ /// Creates a stored procedure definition with a uniquely generated name prefixed by the specified prefix and defined with the specified
+ /// body and header. Optionally allows specifying the schema and whether the stored procedure is Schema Bound.
+ /// </summary>
+ /// <param name="database"></param>
+ /// <param name="spNamePrefix"></param>
+ /// <param name="schema"></param>
+ /// <param name="textBody"></param>
+ /// <param name="isSchemaBound"></param>
+ /// <returns></returns>
+ public static StoredProcedure CreateSPDefinition(this Database database, string spNamePrefix, string schema, string textBody, bool isSchemaBound = false)
+ {
+ var proc = string.IsNullOrEmpty(schema) ?
+ new StoredProcedure(database, SmoObjectHelpers.GenerateUniqueObjectName(spNamePrefix)) :
+ new StoredProcedure(database, SmoObjectHelpers.GenerateUniqueObjectName(spNamePrefix), schema);
+ proc.TextBody = textBody;
+ proc.TextHeader = string.Format("CREATE PROCEDURE {0}.{1} {2} AS",
+ SmoObjectHelpers.SqlBracketQuoteString(proc.Schema),
+ SmoObjectHelpers.SqlBracketQuoteString(proc.Name),
+ isSchemaBound ? "WITH SCHEMABINDING" : string.Empty);
+ TraceHelper.TraceInformation("Creating new stored procedure definition \"{0}\"", proc.Name);
+ return proc;
+ }
+
+ /// <summary>
+ /// Creates a basic table with a uniquely generated name prefixed by the specified prefix.
+ /// Optionally allows specifying the schema and columns to create. If no columns are given
+ /// a single default one will be added.
+ /// </summary>
+ /// <param name="database">The database on which to create the table.</param>
+ /// <param name="tableNamePrefix">The name prefix for the table.</param>
+ /// <param name="schemaName">The optional schema.</param>
+ /// <param name="tableProperties">The optional table properties to use.</param>
+ /// <param name="columnProperties">The column properties to use.</param>
+ /// <param name="indexProperties">The index properties to use.</param>
+ /// <param name="includeNameUniqueifier">True to include a uniqueifier.</param>
+ /// <returns>The SMO table.</returns>
+ public static Table CreateTable(
+ Database database,
+ string tableNamePrefix,
+ string schemaName = null,
+ TableProperties tableProperties = null,
+ ColumnProperties[] columnProperties = null,
+ IndexProperties[] indexProperties = null,
+ bool includeNameUniqueifier = true)
+ {
+ Table table = CreateTableDefinition(database, tableNamePrefix, schemaName, tableProperties, columnProperties, indexProperties, includeNameUniqueifier);
+
+ TraceHelper.TraceInformation("Creating new table \"{0}\".", table.Name);
+
+ table.Create();
+
+ return table;
+ }
+
+ /// <summary>
+ /// Creates a basic table with a uniquely generated name prefixed by the specified prefix.
+ /// Optionally allows specifying the schema and columns to create. If no columns are given
+ /// a single default one will be added.
+ /// </summary>
+ /// <param name="database">The database on which to create the table.</param>
+ /// <param name="tableNamePrefix">The name prefix for the table.</param>
+ /// <param name="schemaName">The optional schema.</param>
+ /// <param name="tableProperties">The table properties to use.</param>
+ /// <param name="columnProperties">The list of columns.</param>
+ /// <param name="indexProperties">Indexes to add to the table.</param>
+ /// <param name="includeNameUniqueifier">Include a name uniqueifier to the table name.</param>
+ /// <returns>The table.</returns>
+ public static Table CreateTableDefinition(
+ this Database database,
+ string tableNamePrefix,
+ string schemaName = null,
+ TableProperties tableProperties = null,
+ ColumnProperties[] columnProperties = null,
+ IndexProperties[] indexProperties = null,
+ bool includeNameUniqueifier = true)
+ {
+ Table table = string.IsNullOrEmpty(schemaName) ?
+ new Table(database, includeNameUniqueifier ? SmoObjectHelpers.GenerateUniqueObjectName(tableNamePrefix) : tableNamePrefix) :
+ new Table(database, includeNameUniqueifier ? SmoObjectHelpers.GenerateUniqueObjectName(tableNamePrefix) : tableNamePrefix, schemaName);
+
+ // Apply any table properties to the table.
+ //
+ if (tableProperties != null)
+ {
+ tableProperties.ApplyProperties(table);
+ }
+
+ if (columnProperties == null ||columnProperties.Any() == false)
+ {
+ if (tableProperties == null || !tableProperties.IsEdge)
+ {
+ // User didn't specify any columns, just add in a default one. In the
+ // case of edge tables columns will only be added to this collection if
+ // they are provided.
+ //
+ table.Columns.Add(new Column(table, "col_1", new DataType(SqlDataType.Int)));
+ }
+ }
+ else
+ {
+ foreach (ColumnProperties columnProps in columnProperties)
+ {
+ table.Columns.Add(
+ new Column(
+ table,
+ columnProps.Name,
+ columnProps.SmoDataType)
+ {
+ Nullable = columnProps.Nullable,
+ Identity = columnProps.Identity
+ });
+ }
+ }
+
+ // Append indexes during creation if there are any.
+ //
+ if (indexProperties != null && indexProperties.Any())
+ {
+ foreach (IndexProperties index in indexProperties)
+ {
+ var idx = new SMO.Index(table, index.Name)
+ {
+ IndexType = index.IndexType,
+ IndexKeyType = index.KeyType,
+ };
+
+ if (index.Columns != null && index.Columns.Any())
+ {
+ foreach (Column column in index.Columns)
+ {
+ idx.IndexedColumns.Add(new IndexedColumn(idx, column.Name));
+ }
+ }
+
+ if (index.ColumnNames != null && index.ColumnNames.Any())
+ {
+ foreach (string columnName in index.ColumnNames)
+ {
+ idx.IndexedColumns.Add(new IndexedColumn(idx, columnName));
+ }
+ }
+
+ table.Indexes.Add(idx);
+ }
+ }
+
+ TraceHelper.TraceInformation("Creating new table definition \"{0}\" with {1} columns and {2} indexes.", table.Name, table.Columns.Count, table.Indexes.Count);
+
+ return table;
+ }
+
+ /// <summary>
+ /// Creates a basic table with a uniquely generated name prefixed by the specified prefix.
+ /// Optionally allows specifying the columns to create. If no columns are given a single
+ /// default one will be added. Will be part of the "dbo" schema.
+ /// </summary>
+ /// <param name="database">The database this table will be created in.</param>
+ /// <param name="tableNamePrefix">The table name prefix for the table.</param>
+ /// <param name="columns">The list of columns.</param>
+ /// <returns>The table object.</returns>
+ public static Table CreateTable(this Database database, string tableNamePrefix, params ColumnProperties[] columns)
+ {
+ return CreateTable(database, tableNamePrefix, schemaName: "dbo", tableProperties: null, columnProperties: columns);
+ }
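+
+ // Illustrative usage only: with no column properties the helper adds a single int column named "col_1":
+ //   var table = database.CreateTable("myTestTable");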
+
+ /// <summary>
+ /// Create an external language.
+ /// </summary>
+ /// <param name="db">The target database</param>
+ /// <param name="languageName">The language name</param>
+ /// <returns>The newly created external language</returns>
+ public static ExternalLanguage CreateExternalLanguageDefinition(this SMO.Database db, string languageName)
+ {
+ TraceHelper.TraceInformation($"Creating external language [{languageName}] for database [{db.Name}]");
+ SMO.ExternalLanguage lang = new SMO.ExternalLanguage(db, languageName);
+ return lang;
+ }
+
+ /// <summary>
+ /// Call CreateExternalLanguageDefinition to create an external language on the server.
+ /// </summary>
+ /// <param name="db">The target database</param>
+ /// <param name="languageName">The language name</param>
+ /// <param name="externalLangFileName">Name of the extension .dll or .so file</param>
+ /// <param name="externalLangFilePath">The full file path to the .zip or tar.gz file containing the extensions code</param>
+ /// <param name="externalLangContentBits">The content of the language as a hex literal, similar to assemblies.</param>
+ /// <param name="platform">The platform language was created for</param>
+ /// <returns>The newly created external language</returns>
+ /// <remarks>One and only one from externalLangFilePath or externalLangContentBits must be specified.</remarks>
+ public static ExternalLanguage CreateExternalLanguage(
+ this SMO.Database db,
+ string languageName,
+ string externalLangFileName,
+ ExternalLanguageFilePlatform platform = ExternalLanguageFilePlatform.Default,
+ string externalLangFilePath = null,
+ byte[] externalLangContentBits = null)
+ {
+ // At least a language file path or language binary needs to be provided
+ //
+ if (string.IsNullOrEmpty(externalLangFilePath) && externalLangContentBits == null)
+ {
+ throw new InvalidOperationException("At least a language file path or language binary needs to be provided.");
+ }
+
+ var externalLanguage = CreateExternalLanguageDefinition(db, languageName);
+ if (!string.IsNullOrEmpty(externalLangFilePath))
+ {
+ externalLanguage.AddFile(externalLangFileName, externalLangFilePath, platform);
+ }
+ else
+ {
+ externalLanguage.AddFile(externalLangFileName, externalLangContentBits, platform);
+ }
+
+ externalLanguage.Create();
+ return externalLanguage;
+ }
+
+ /// <summary>
+ /// Create an external library.
+ /// </summary>
+ /// <param name="db">The target database</param>
+ /// <param name="libraryName">The library name</param>
+ /// <returns>The newly created external library</returns>
+ public static ExternalLibrary CreateExternalLibraryDefinition(this SMO.Database db, string libraryName)
+ {
+ TraceHelper.TraceInformation("Creating external library [{0}] for database [{1}]", libraryName, db.Name);
+ SMO.ExternalLibrary lib = new SMO.ExternalLibrary(db, libraryName);
+ return lib;
+ }
+
+ /// <summary>
+ /// Call CreateExternalLibraryDefinition to create an external library on the server.
+ /// </summary>
+ /// <param name="db">The target database</param>
+ /// <param name="libraryName">The library name</param>
+ /// <param name="libraryContent"></param>
+ /// <param name="contentType"></param>
+ /// <returns>The newly created external library</returns>
+ public static ExternalLibrary CreateExternalLibrary(this SMO.Database db, string libraryName, string libraryContent, ExternalLibraryContentType contentType)
+ {
+ var externalLibrary = CreateExternalLibraryDefinition(db, libraryName);
+ externalLibrary.ExternalLibraryLanguage = "R";
+ externalLibrary.Create(libraryContent, contentType);
+ return externalLibrary;
+ }
+
+ /// <summary>
+ /// Creates a user defined function definition with a uniquely generated name prefixed by the specified prefix and defined with the specified
+ /// body and header. Optionally allows specifying the schema and whether the function is Schema Bound.
+ /// </summary>
+ /// <param name="database"></param>
+ /// <param name="udfNamePrefix"></param>
+ /// <param name="schema"></param>
+ /// <param name="textBody"></param>
+ /// <param name="isSchemaBound"></param>
+ /// <returns></returns>
+ public static UserDefinedFunction CreateUdfDefinition(this Database database, string udfNamePrefix, string schema, string textBody, bool isSchemaBound = false)
+ {
+ var udf = string.IsNullOrEmpty(schema) ?
+ new UserDefinedFunction(database, SmoObjectHelpers.GenerateUniqueObjectName(udfNamePrefix)) :
+ new UserDefinedFunction(database, SmoObjectHelpers.GenerateUniqueObjectName(udfNamePrefix), schema);
+ udf.TextBody = textBody;
+ udf.TextHeader = string.Format("CREATE FUNCTION {0}.{1} {2} AS",
+ SmoObjectHelpers.SqlBracketQuoteString(udf.Schema),
+ SmoObjectHelpers.SqlBracketQuoteString(udf.Name),
+ isSchemaBound ? "WITH SCHEMABINDING" : string.Empty);
+ TraceHelper.TraceInformation("Creating new user defined function definition \"{0}\"", udf.Name);
+ return udf;
+ }
+
+ /// <summary>
+ /// Creates a user defined function with a uniquely generated name prefixed by the specified prefix and defined with the specified
+ /// body and header. Optionally allows specifying the schema and whether the function is Schema Bound.
+ /// </summary>
+ /// <param name="database"></param>
+ /// <param name="udfNamePrefix"></param>
+ /// <param name="schema"></param>
+ /// <param name="textBody"></param>
+ /// <param name="isSchemaBound"></param>
+ /// <returns></returns>
+ public static UserDefinedFunction CreateUdf(this Database database, string udfNamePrefix, string schema, string textBody, bool isSchemaBound = false)
+ {
+ var udf = database.CreateUdfDefinition(udfNamePrefix, schema, textBody, isSchemaBound);
+ udf.Create();
+ return udf;
+ }
+
+ /// <summary>
+ /// Creates a view definition with a uniquely generated name prefixed by the specified prefix and defined with the specified
+ /// body and header. Optionally allows specifying the schema and whether the view is Schema Bound.
+ /// </summary>
+ /// <param name="database"></param>
+ /// <param name="viewNamePrefix"></param>
+ /// <param name="schema"></param>
+ /// <param name="textBody"></param>
+ /// <param name="isSchemaBound"></param>
+ /// <returns></returns>
+ /// <remarks>You do not need to create new column objects (normally required when creating a view in SMO), this
+ /// method will create the new columns for you based on the specified columns</remarks>
+ public static View CreateViewDefinition(this Database database, string viewNamePrefix, string schema, string textBody, bool isSchemaBound = false)
+ {
+ var view = string.IsNullOrEmpty(schema) ?
+ new View(database, SmoObjectHelpers.GenerateUniqueObjectName(viewNamePrefix)) :
+ new View(database, SmoObjectHelpers.GenerateUniqueObjectName(viewNamePrefix), schema);
+ view.TextBody = textBody;
+ view.TextHeader = string.Format("CREATE VIEW {0}.{1} {2} AS",
+ SmoObjectHelpers.SqlBracketQuoteString(view.Schema),
+ SmoObjectHelpers.SqlBracketQuoteString(view.Name),
+ isSchemaBound ? "WITH SCHEMABINDING" : string.Empty);
+ TraceHelper.TraceInformation("Creating new view definition \"{0}\" with {1} columns", view.Name, view.Columns.Count);
+ return view;
+ }
+
+ /// <summary>
+ /// Creates a view with a uniquely generated name prefixed by the specified prefix and defined with the specified
+ /// body and header. Optionally allows specifying the schema and whether the view is Schema Bound.
+ /// </summary>
+ /// <param name="database"></param>
+ /// <param name="viewNamePrefix"></param>
+ /// <param name="schema"></param>
+ /// <param name="textBody"></param>
+ /// <param name="isSchemaBound"></param>
+ /// <returns></returns>
+ public static View CreateView(this Database database, string viewNamePrefix, string schema, string textBody, bool isSchemaBound = false)
+ {
+ var view = database.CreateViewDefinition(viewNamePrefix, schema, textBody, isSchemaBound);
+ view.Create();
+ return view;
+ }
+
+ /// <summary>
+ /// Creates a memory-optimized-enabled filegroup (a prerequisite for creating Hekaton tables).
+ /// The new filegroup (and a file within it) is created alongside the default (primary) filegroup
+ /// of the given database.
+ /// </summary>
+ /// <param name="database">The SMO database object.</param>
+ /// <param name="filegroupName">The filegroup name.</param>
+ /// <returns>Newly created filegroup.</returns>
+ public static FileGroup CreateMemoryOptimizedFileGroup(this Database database, string filegroupName)
+ {
+ FileGroup memoryOptimizedFg = new FileGroup(database, filegroupName, FileGroupType.MemoryOptimizedDataFileGroup);
+ memoryOptimizedFg.Create();
+
+ DataFile dataFile = new DataFile(memoryOptimizedFg, filegroupName)
+ {
+ FileName = PathWrapper.Combine(PathWrapper.GetDirectoryName(database.FileGroups[0].Files[0].FileName), filegroupName)
+ };
+
+ dataFile.Create();
+
+ return memoryOptimizedFg;
+ }
+
+ /// <summary>
+ /// Create a DatabaseDdlTrigger with a uniquely generated name prefixed by the specified prefix.
+ /// </summary>
+ /// <param name="triggerNamePrefix">The prefix name of the created trigger.</param>
+ /// <param name="forOrAfterEvent">For/After ddl event, e.g. "FOR ALTER" or "AFTER CREATE".</param>
+ /// <param name="textBody">The tsql body of the trigger.</param>
+ /// <returns></returns>
+ public static DatabaseDdlTrigger CreateDatabaseDdlTrigger(this Database database, string triggerNamePrefix, string forOrAfterEvent, string textBody)
+ {
+ var triggerDb = new DatabaseDdlTrigger(database, SmoObjectHelpers.GenerateUniqueObjectName(triggerNamePrefix));
+ triggerDb.TextHeader = string.Format("CREATE TRIGGER {0} ON DATABASE {1} AS", SmoObjectHelpers.SqlBracketQuoteString(triggerDb.Name), forOrAfterEvent);
+ triggerDb.TextBody = textBody;
+ triggerDb.ImplementationType = ImplementationType.TransactSql;
+ triggerDb.ExecutionContext = DatabaseDdlTriggerExecutionContext.Caller;
+ triggerDb.Create();
+ return triggerDb;
+ }
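+
+ // Illustrative usage only (event and trigger body are placeholders):
+ //   var trigger = database.CreateDatabaseDdlTrigger("ddlTrig", "FOR DROP_TABLE", "PRINT 'table dropped'");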
+
+ /// <summary>
+ /// Creates a new FileGroup with the specific name, and then adds a new DataFile on the created FileGroup.
+ /// </summary>
+ /// <param name="database"></param>
+ /// <param name="fileGroupName"></param>
+ /// <returns></returns>
+ public static FileGroup CreateFileGroupWithDataFile(this Database database, string fileGroupName)
+ {
+ // Create a FileGroup object with the specific name
+ FileGroup fileGroup = new FileGroup(database, fileGroupName);
+ fileGroup.Create();
+
+ // Create a DataFile on the FileGroup that is created before.
+ DataFile dataFile = new DataFile(fileGroup, fileGroupName);
+ dataFile.FileName = string.Format("{0}.mdf", PathWrapper.Combine(database.PrimaryFilePath, dataFile.Name));
+ dataFile.Create();
+
+ return fileGroup;
+ }
+
+ /// <summary>
+ /// Creates the partition scheme and specifies the file group for each partition.
+ /// </summary>
+ /// <param name="database"></param>
+ /// <param name="partitionPrefixName"></param>
+ /// <param name="rangeValues">Specify the values that divide the data, e.g. {"3","6","9"} or {"1/1/2013","1/1/2014","1/1/2015"}</param>
+ /// <param name="dataType">The column type that the partition function uses, e.g. Int or DateTime</param>
+ /// <returns></returns>
+ public static PartitionScheme CreatePartitionSchemeWithFileGroups(this Database database, string partitionPrefixName, object[] rangeValues, DataType dataType)
+ {
+ // The partition number should be one more than the size of the rangeValues.
+ int partitionNumber = rangeValues.Count() + 1;
+
+ // Create the fileGroups with the data file for each partition
+ Collection<FileGroup> fileGroups = new Collection<FileGroup>();
+ for (int id = 0; id < partitionNumber; id++)
+ {
+ // Azure SQL DB only supports PRIMARY filegroup
+ if (database.DatabaseEngineType == DatabaseEngineType.SqlAzureDatabase)
+ {
+ fileGroups.Add(database.FileGroups["PRIMARY"]);
+ }
+ else
+ {
+ fileGroups.Add(DatabaseObjectHelpers.CreateFileGroupWithDataFile(database,
+ String.Format("{0}_FG{1}", partitionPrefixName, id.ToString())));
+ }
+ }
+
+ // Create a partition function with the specific range values and data type for the partition scheme.
+ PartitionFunction partitionFunction = new PartitionFunction(database, partitionPrefixName + "_PF");
+ partitionFunction.RangeValues = rangeValues;
+
+ PartitionFunctionParameter partitionFunctionParameter = new PartitionFunctionParameter(partitionFunction, dataType);
+ partitionFunction.PartitionFunctionParameters.Add(partitionFunctionParameter);
+
+ partitionFunction.Create();
+
+ // Create a partition scheme and specify the partition function and the file groups
+ PartitionScheme partitionScheme = new PartitionScheme(database, partitionPrefixName + "_PS");
+
+ partitionScheme.PartitionFunction = partitionFunction.Name;
+ for (int id = 0; id < partitionNumber; id++)
+ {
+ partitionScheme.FileGroups.Add(fileGroups[id].Name);
+ }
+
+ partitionScheme.Create();
+
+ return partitionScheme;
+ }
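+
+ // Illustrative usage only: four partitions split on an int column at 3, 6 and 9:
+ //   var scheme = database.CreatePartitionSchemeWithFileGroups("pt", new object[] { "3", "6", "9" }, DataType.Int);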
+
+ /// <summary>
+ /// Take a full backup of the database
+ /// </summary>
+ /// <param name="database">The database</param>
+ public static void TakeFullBackup(this Database database)
+ {
+ Backup backup = new Backup
+ {
+ Action = BackupActionType.Database,
+ Database = database.Name,
+ BackupSetDescription = string.Format("Full backup of {0}", database.Name),
+ BackupSetName = database.Name,
+ ExpirationDate = DateTime.Now.AddYears(1),
+ LogTruncation = BackupTruncateLogType.Truncate,
+ Incremental = false, //full backup
+ };
+
+ BackupDeviceItem bdi = new BackupDeviceItem(string.Format("{0}.bak", database.Name), DeviceType.File);
+ backup.Devices.Add(bdi);
+
+ backup.SqlBackup(database.Parent);
+ }
+
+ /// <summary>
+ /// Creates a new User in this database mapped to the specified login
+ /// </summary>
+ /// <param name="database">The database to add the user to</param>
+ /// <param name="name">The name to give the user</param>
+ /// <param name="loginName">The name of the login this user maps to</param>
+ /// <param name="password">Optional - the password for the user</param>
+ /// <returns>The created User object</returns>
+ public static User CreateUser(this Database database, string name, string loginName, string password = "")
+ {
+ var user = database.CreateUserDefinition(name, loginName);
+ TraceHelper.TraceInformation("Creating new User definition '{0}' in database '{1}'{2}{3}",
+ name,
+ database.Name,
+ string.IsNullOrEmpty(loginName) ? string.Empty : " for login " + loginName,
+ string.IsNullOrEmpty(loginName) ? string.Empty : " with password '" + password + "'");
+ if (string.IsNullOrEmpty(password))
+ {
+ user.Create();
+ }
+ else
+ {
+ user.Create(password);
+ }
+ return user;
+ }
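+
+ // Illustrative usage only (user and login names are hypothetical):
+ //   var user = database.CreateUser("testUser", "testLogin");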
+
+ /// <summary>
+ /// Creates a User definition for this database mapped to the specified login.
+ /// This does not actually create the User on the server, just the definition.
+ /// </summary>
+ /// <param name="database">The database to create the definition on</param>
+ /// <param name="name">The name to give the user</param>
+ /// <param name="loginName">The name of the login this user maps to</param>
+ /// <returns>The User object definition</returns>
+ public static User CreateUserDefinition(this Database database, string name, string loginName = "")
+ {
+ TraceHelper.TraceInformation("Creating new User definition '{0}'{1}",
+ name,
+ string.IsNullOrEmpty(loginName) ? string.Empty : " for login " + loginName);
+ var user = new User(database, name);
+ if (!string.IsNullOrEmpty(loginName))
+ {
+ user.Login = loginName;
+ }
+ return user;
+ }
+
+ /// <summary>
+ /// Creates a UDDT with the given name and default content
+ /// </summary>
+ /// <param name="database"></param>
+ /// <param name="name"></param>
+ /// <returns></returns>
+ public static UserDefinedDataType CreateUserDefinedDataType(this Database database, string name)
+ {
+ var uddt = new UserDefinedDataType(database, name);
+ uddt.Create();
+ return uddt;
+ }
+
+ /// <summary>
+ /// Creates a user defined aggregate with the given name and default content
+ /// </summary>
+ /// <param name="database"></param>
+ /// <param name="name"></param>
+ /// <returns></returns>
+ public static UserDefinedAggregate CreateUserDefinedAggregate(this Database database, string name)
+ {
+ var uda = new UserDefinedAggregate(database, name);
+
+ uda.Parameters.Add(new UserDefinedAggregateParameter(uda, "udaParam", DataType.Int));
+ uda.Create();
+ return uda;
+ }
+
+ /// <summary>
+ /// Creates a database scoped credential with the given name and default content
+ /// </summary>
+ /// <param name="database">The database to create the credential in</param>
+ /// <param name="name">The name of the credential</param>
+ /// <returns>The created DatabaseScopedCredential</returns>
+ public static DatabaseScopedCredential CreateDatabaseScopedCredential(this Database database, string name)
+ {
+ var dsc = new DatabaseScopedCredential(database, name);
+ dsc.Create("userName", Guid.NewGuid().ToString());
+ return dsc;
+ }
+
+ /// <summary>
+ /// Creates an asymmetric key with the given name and default content
+ /// </summary>
+ /// <param name="database">The database to create the key in</param>
+ /// <param name="name">The name of the key</param>
+ /// <returns>The created AsymmetricKey</returns>
+ public static AsymmetricKey CreateAsymmetricKey(this Database database, string name)
+ {
+ var key = new AsymmetricKey(database, name);
+ key.Create(AsymmetricKeyEncryptionAlgorithm.Rsa1024, Guid.NewGuid().ToString());
+ return key;
+ }
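+
+ // Illustrative usage sketch (not part of this change): 'db' is an existing Database and the object
+ // names are placeholders. Each helper creates the object on the server with default content.
+ //
+ //   UserDefinedDataType uddt = db.CreateUserDefinedDataType("uddt_test");
+ //   DatabaseScopedCredential cred = db.CreateDatabaseScopedCredential("dsc_test");
+ //   AsymmetricKey asymKey = db.CreateAsymmetricKey("key_test");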
+ }
+}
diff --git a/src/FunctionalTest/Framework/Helpers/DirectoryHelpers.cs b/src/FunctionalTest/Framework/Helpers/DirectoryHelpers.cs
new file mode 100644
index 00000000..c4281a0e
--- /dev/null
+++ b/src/FunctionalTest/Framework/Helpers/DirectoryHelpers.cs
@@ -0,0 +1,40 @@
+// Copyright (c) Microsoft.
+// Licensed under the MIT license.
+
+using System;
+using System.IO;
+using System.Threading;
+
+namespace Microsoft.SqlServer.Test.Manageability.Utils
+{
+ public static class DirectoryHelpers
+ {
+ /// <summary>
+ /// Calls delete on a directory and then waits until Directory.Exists returns false, or until
+ /// the retry count is exceeded.
+ /// </summary>
+ /// <param name="path">The directory to delete</param>
+ /// <param name="retryCount">Number of times to check that the directory is gone before giving up</param>
+ /// <param name="retrySleepTimeMs">Delay in milliseconds between checks</param>
+ /// <remarks>This is to get around an issue where the call to Directory.Delete only marks the directory for deletion, so
+ /// calling that and then immediately trying to write a file to that location occasionally results in an IOException</remarks>
+ public static void SynchronousDirectoryDelete(string path, int retryCount = 3, int retrySleepTimeMs = 1000)
+ {
+ Directory.Delete(path, true);
+ //An issue was occurring where the directory wasn't fully deleted until after a subsequent call
+ //to CreateDirectory - which resulted in the directory not existing when files were later saved to it.
+ //So now wait until it shows as actually deleted (or the retry count is exceeded)
+ int i = 0;
+ while (Directory.Exists(path))
+ {
+ if (i >= retryCount)
+ {
+ throw new InvalidOperationException(
+ string.Format("Directory '{0}' isn't deleted even after waiting", path));
+ }
+ ++i;
+ Thread.Sleep(retrySleepTimeMs);
+ }
+ }
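+
+ // Illustrative usage sketch (not part of this change): the path is a placeholder.
+ //
+ //   DirectoryHelpers.SynchronousDirectoryDelete(@"C:\temp\testOutput", retryCount: 5);
+ //   Directory.CreateDirectory(@"C:\temp\testOutput"); // safe to recreate once the call returns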
+ }
+}
diff --git a/src/FunctionalTest/Framework/Helpers/ExceptionHelpers.cs b/src/FunctionalTest/Framework/Helpers/ExceptionHelpers.cs
new file mode 100644
index 00000000..5412a24e
--- /dev/null
+++ b/src/FunctionalTest/Framework/Helpers/ExceptionHelpers.cs
@@ -0,0 +1,75 @@
+// Copyright (c) Microsoft.
+// Licensed under the MIT license.
+
+using System;
+using System.Text;
+
+using Microsoft.SqlServer.Test.Manageability.Utils.Helpers;
+
+namespace Microsoft.SqlServer.Test.Manageability.Utils
+{
+ /// <summary>
+ /// Helper methods/values for Exceptions
+ /// </summary>
+ public static class ExceptionHelpers
+ {
+ /// <summary>
+ /// Builds up an exception message string by recursively iterating through all of the InnerException
+ /// children and adding their message to the string.
+ /// </summary>
+ /// <param name="e">The exception to build the message from</param>
+ /// <returns>The combined exception message</returns>
+ public static string BuildRecursiveExceptionMessage(this Exception e)
+ {
+ StringBuilder msg = new StringBuilder();
+
+ while (e != null)
+ {
+ msg.AppendFormat("{0} -> ", e.Message);
+ e = e.InnerException;
+ }
+
+ //Trim off the trailing " -> " separator
+ if (msg.Length >= 4)
+ {
+ msg.Length -= 4;
+ }
+
+ return msg.ToString();
+ }
+
+
+ /// <summary>
+ /// Checks if the specified exception is the expected exception
+ /// based on the exception message.
+ /// </summary>
+ /// <param name="e">Exception to check.</param>
+ /// <param name="errorMessage">Expected exception message.</param>
+ /// <returns>True, if the expected exception. False otherwise.</returns>
+ public static bool IsExpectedException(Exception e, string errorMessage)
+ {
+ bool expectedException = false;
+ while (e != null)
+ {
+ // validate expected error message
+ if (e.Message.Equals(errorMessage, StringComparison.OrdinalIgnoreCase))
+ {
+ expectedException = true;
+ break;
+ }
+
+ e = e.InnerException;
+ }
+
+ // if the expected error message was not found, the exception was unexpected;
+ // return false so the caller can rethrow or fail the test
+ if (!expectedException)
+ {
+ return false;
+ }
+
+ TraceHelper.TraceInformation("Found expected exception {0} with error message '{1}'", e.GetType().Name, errorMessage);
+ return true;
+ }
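+
+ // Illustrative usage sketch (not part of this change): the operation and message text are placeholders.
+ //
+ //   try { RunOperationExpectedToFail(); } // hypothetical test operation
+ //   catch (Exception ex)
+ //   {
+ //       if (!ExceptionHelpers.IsExpectedException(ex, "The expected error message"))
+ //       {
+ //           TraceHelper.TraceInformation(ex.BuildRecursiveExceptionMessage());
+ //           throw; // unexpected failure - surface it
+ //       }
+ //   }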
+ }
+}
diff --git a/src/FunctionalTest/Framework/Helpers/GenericSqlProxy.cs b/src/FunctionalTest/Framework/Helpers/GenericSqlProxy.cs
new file mode 100644
index 00000000..70da8235
--- /dev/null
+++ b/src/FunctionalTest/Framework/Helpers/GenericSqlProxy.cs
@@ -0,0 +1,255 @@
+// Copyright (c) Microsoft.
+// Licensed under the MIT license.
+
+using System;
+#if MICROSOFTDATA
+using Microsoft.Data.SqlClient;
+#else
+using System.Data.SqlClient;
+#endif
+using System.Net.Sockets;
+using System.Net;
+using System.Diagnostics;
+using System.Threading;
+using System.Threading.Tasks;
+
+namespace Microsoft.SqlServer.Test.Manageability.Utils
+{
+ /// <summary>
+ /// Provides an in-memory proxy with callbacks that allow tests to run code before transmission and after receipt of
+ /// data on the wire
+ /// </summary>
+ [DebuggerDisplay("{connectionString}:[{Port}]")]
+ public class GenericSqlProxy : IDisposable
+ {
+ // We pick a buffer size that's large enough to hold most single replies so we don't over-inject latency
+ private const int BufferSizeBytes = 128 * 1024;
+ readonly string connectionString;
+ volatile bool disposed;
+ private TcpListener listener = null;
+ private readonly CancellationTokenSource tokenSource = new CancellationTokenSource();
+
+ /// <summary>
+ /// Constructs a GenericSqlProxy for the local default sql instance
+ /// </summary>
+ public GenericSqlProxy() : this(".")
+ {
+
+ }
+
+ /// <summary>
+ /// Constructs a new GenericSqlProxy for the given connection string
+ /// </summary>
+ /// <param name="connectionString">The connection string of the SQL Server instance to proxy</param>
+ public GenericSqlProxy(string connectionString)
+ {
+ this.connectionString = connectionString;
+ }
+
+ /// <summary>
+ /// The port through which the proxy redirects the sql connection
+ /// </summary>
+ public int Port { get; private set; }
+
+ /// <summary>
+ /// Initializes the proxy by opening the TCP listener and copying data between client and server
+ /// </summary>
+ /// <param name="localPort">local port number to use. 0 will use a random port</param>
+ /// <returns>The connection string to use for the SqlConnection</returns>
+ public string Initialize(int localPort = 0)
+ {
+ var builder = new SqlConnectionStringBuilder(connectionString);
+ GetTcpInfoFromDataSource(builder.DataSource, out string hostName, out int port);
+ listener = new TcpListener(IPAddress.Loopback, localPort);
+ listener.Server.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.DontLinger, true);
+ listener.Server.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.ReuseAddress, true);
+ listener.Start();
+ Port = ((IPEndPoint) listener.LocalEndpoint).Port;
+ Trace.TraceInformation($"Starting TcpListener on port {Port}");
+ var waitEvent = new ManualResetEvent(false);
+ Task.Factory.StartNew(() => { AsyncInit(listener, hostName, port, waitEvent); });
+ waitEvent.WaitOne();
+
+ return new SqlConnectionStringBuilder(builder.ConnectionString)
+ {
+ DataSource = $"tcp:127.0.0.1,{Port}"
+ }.ConnectionString;
+ }
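+
+ // Illustrative usage sketch (not part of this change): wraps a connection so a test can observe
+ // (or perturb) the traffic on the wire. The connection string is a placeholder.
+ //
+ //   using (var proxy = new GenericSqlProxy("Data Source=myServer;Integrated Security=true"))
+ //   {
+ //       proxy.OnWriteClient += (sender, args) => Trace.TraceInformation("{0} bytes to client", args.BytesWritten);
+ //       string proxiedConnectionString = proxy.Initialize();
+ //       using (var conn = new SqlConnection(proxiedConnectionString))
+ //       {
+ //           conn.Open(); // traffic now flows through 127.0.0.1 on proxy.Port
+ //       }
+ //   }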
+
+ private void AsyncInit(TcpListener tcpListener, string hostName, int port, ManualResetEvent waitEvent)
+ {
+
+ while (!disposed)
+ {
+ var accept = tcpListener.AcceptTcpClientAsync();
+ waitEvent.Set();
+ if (accept.Wait(1000, tokenSource.Token) && !tokenSource.IsCancellationRequested)
+ {
+ Trace.TraceInformation($"{DateTime.Now} Accepted!");
+ var localClient = accept.GetAwaiter().GetResult();
+ Trace.TraceInformation($"{DateTime.Now} Connecting!");
+ OnConnect?.Invoke(this, new ProxyConnectionEventArgs(localClient));
+ var remoteClient = new TcpClient() {NoDelay = true};
+ tokenSource.Token.Register(() =>
+ {
+ localClient.Dispose();
+ remoteClient.Dispose();
+ });
+ remoteClient.ConnectAsync(hostName, port).Wait(tokenSource.Token);
+ if (!tokenSource.IsCancellationRequested)
+ {
+
+
+ Task.Factory.StartNew(() => { ForwardToSql(localClient, remoteClient); });
+ Task.Factory.StartNew(() => { ForwardToClient(localClient, remoteClient); });
+ }
+ else
+ {
+ Trace.TraceInformation("AsyncInit aborted due to cancellation token set");
+ }
+ }
+ }
+ }
+
+ /// <summary>
+ /// Fires before the proxy writes a buffer to the host
+ /// </summary>
+ public event EventHandler<StreamWriteEventArgs> OnWriteHost;
+
+ /// <summary>
+ /// Fires before the proxy writes a buffer to the client
+ /// </summary>
+ public event EventHandler<StreamWriteEventArgs> OnWriteClient;
+
+ /// <summary>
+ /// Fires when a new connection to the proxy's port is accepted
+ /// </summary>
+ public event EventHandler<ProxyConnectionEventArgs> OnConnect;
+
+ private void ForwardToSql(TcpClient ourClient, TcpClient sqlClient)
+ {
+ long index = 0;
+ try
+ {
+ while (!disposed)
+ {
+ byte[] buffer = new byte[BufferSizeBytes];
+ int bytesRead = ourClient.GetStream().ReadAsync(buffer, 0, buffer.Length, tokenSource.Token).Result;
+ if (!tokenSource.Token.IsCancellationRequested)
+ {
+ OnWriteHost?.Invoke(this, new StreamWriteEventArgs(index++, buffer, bytesRead));
+ sqlClient.GetStream().Write(buffer, 0, bytesRead);
+ }
+ }
+ }
+ catch (Exception)
+ {
+ if (!disposed)
+ {
+ throw;
+ }
+ }
+ finally
+ {
+ Trace.TraceInformation("ForwardToSql exiting");
+ }
+ }
+
+ private void ForwardToClient(TcpClient ourClient, TcpClient sqlClient)
+ {
+ long index = 0;
+ try
+ {
+ while (!disposed)
+ {
+ byte[] buffer = new byte[BufferSizeBytes];
+ int bytesRead = sqlClient.GetStream().ReadAsync(buffer, 0, buffer.Length, tokenSource.Token).Result;
+ if (!tokenSource.Token.IsCancellationRequested)
+ {
+ OnWriteClient?.Invoke(this, new StreamWriteEventArgs(index++, buffer, bytesRead));
+ ourClient.GetStream().Write(buffer, 0, bytesRead);
+ }
+ }
+ }
+ catch (Exception)
+ {
+ if (!disposed)
+ {
+ throw;
+ }
+ }
+ finally
+ {
+ Trace.TraceInformation("ForwardToClient exiting");
+ }
+ }
+
+ private static void GetTcpInfoFromDataSource(string dataSource, out string hostName, out int port)
+ {
+ string[] dataSourceParts = dataSource.Split(',');
+ if (dataSourceParts.Length == 1)
+ {
+ hostName = dataSourceParts[0].Replace("tcp:", "");
+ port = 1433;
+ }
+ else if (dataSourceParts.Length == 2)
+ {
+ hostName = dataSourceParts[0].Replace("tcp:", "");
+ port = int.Parse(dataSourceParts[1]);
+ }
+ else
+ {
+ throw new InvalidOperationException("TCP Connection String not in correct format!");
+ }
+ }
+
+ public void Dispose()
+ {
+ disposed = true;
+ tokenSource.Cancel();
+ Trace.TraceInformation("Disposing TcpListener on port {0}", Port);
+ listener?.Stop();
+ }
+ }
+
+ /// <summary>
+ /// Arguments passed to the OnWriteHost and OnWriteClient event handlers
+ /// </summary>
+ public class StreamWriteEventArgs : EventArgs
+ {
+ internal StreamWriteEventArgs(long index, byte[] buffer, int bytesWritten)
+ {
+ Index = index;
+ Buffer = buffer;
+ BytesWritten = bytesWritten;
+ }
+ /// <summary>
+ /// The 0-based index of this event showing the order it was received
+ /// </summary>
+ public long Index;
+ /// <summary>
+ /// The data sent or received. This array is a fixed size.
+ /// </summary>
+ public byte[] Buffer;
+ /// <summary>
+ /// How many bytes are in the Buffer.
+ /// </summary>
+ public int BytesWritten;
+ }
+
+ /// <summary>
+ /// Arguments passed to the OnConnect event handler
+ /// </summary>
+ public class ProxyConnectionEventArgs : EventArgs
+ {
+ internal ProxyConnectionEventArgs(TcpClient client)
+ {
+ Client = client;
+ }
+
+ /// <summary>
+ /// The new TcpClient instance associated with the connection
+ /// </summary>
+ public TcpClient Client;
+ }
+}
diff --git a/src/FunctionalTest/Framework/Helpers/PolicyStoreHelpers.cs b/src/FunctionalTest/Framework/Helpers/PolicyStoreHelpers.cs
new file mode 100644
index 00000000..84d164ac
--- /dev/null
+++ b/src/FunctionalTest/Framework/Helpers/PolicyStoreHelpers.cs
@@ -0,0 +1,190 @@
+// Copyright (c) Microsoft.
+// Licensed under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using Microsoft.SqlServer.Management.Dmf;
+using Microsoft.SqlServer.Test.Manageability.Utils.Helpers;
+
+namespace Microsoft.SqlServer.Test.Manageability.Utils
+{
+ /// <summary>
+ /// Helper methods, constants, etc. dealing with the SMO PolicyStore object
+ /// </summary>
+ public static class PolicyStoreHelpers
+ {
+ /// <summary>
+ /// Creates a local DMF Condition object for this PolicyStore but does not actually create it on the server
+ /// </summary>
+ /// <param name="policyStore">The PolicyStore to create the Condition in</param>
+ /// <param name="facet">The Facet this Condition is for</param>
+ /// <param name="expressionNodeExpression">The expression this Condition will evaluate</param>
+ /// <param name="conditionNamePrefix">The prefix for the name of the Condition</param>
+ /// <returns>The Condition definition</returns>
+ public static Condition CreateConditionDefinition(this PolicyStore policyStore, string facet, ExpressionNode expressionNodeExpression, string conditionNamePrefix = "cond_")
+ {
+ string name = SmoObjectHelpers.GenerateUniqueObjectName(conditionNamePrefix);
+ TraceHelper.TraceInformation("Creating Condition definition {0} in PolicyStore {1}", name, policyStore.Name);
+
+ var condition = new Condition(policyStore, name)
+ {
+ Facet = facet,
+ ExpressionNode = expressionNodeExpression
+ };
+
+ return condition;
+ }
+
+ /// <summary>
+ /// Creates a DMF Condition object in this PolicyStore
+ /// </summary>
+ /// <param name="policyStore">The PolicyStore to create the Condition in</param>
+ /// <param name="facet">The Facet this Condition is for</param>
+ /// <param name="expressionNodeExpression">The expression this Condition will evaluate</param>
+ /// <param name="conditionNamePrefix">The prefix for the name of the Condition</param>
+ /// <returns>The created Condition</returns>
+ public static Condition CreateCondition(
+ this PolicyStore policyStore,
+ string facet,
+ string expressionNodeExpression,
+ string conditionNamePrefix = "condition_")
+ {
+ return policyStore.CreateCondition(facet, ExpressionNode.Parse(expressionNodeExpression),
+ conditionNamePrefix);
+ }
+
+ /// <summary>
+ /// Creates a DMF Condition object in this PolicyStore
+ /// </summary>
+ /// <param name="policyStore">The PolicyStore to create the Condition in</param>
+ /// <param name="facet">The Facet this Condition is for</param>
+ /// <param name="expressionNode">The expression this Condition will evaluate</param>
+ /// <param name="conditionNamePrefix">The prefix for the name of the Condition</param>
+ /// <returns>The created Condition</returns>
+ public static Condition CreateCondition(
+ this PolicyStore policyStore,
+ string facet,
+ ExpressionNode expressionNode,
+ string conditionNamePrefix = "condition_")
+ {
+ var condition = policyStore.CreateConditionDefinition(facet, expressionNode, conditionNamePrefix);
+ TraceHelper.TraceInformation("Creating Condition {0} in PolicyStore {1}", condition.Name, policyStore.Name);
+ condition.Create();
+ return condition;
+ }
+ /// <summary>
+ /// Creates a local DMF Policy object but does not actually create it on the server
+ /// </summary>
+ /// <param name="policyStore">The PolicyStore to create the Policy in</param>
+ /// <param name="condition">The name of the condition this Policy will evaluate</param>
+ /// <param name="policyEvaluationMode">The AutomatedPolicyEvaluationMode for the Policy</param>
+ /// <param name="objectSet">The name of the object set containing the object conditions this Policy will execute against</param>
+ /// <param name="policyNamePrefix">The prefix for the name of the Policy</param>
+ /// <returns>The Policy definition</returns>
+ public static Policy CreatePolicyDefinition(
+ this PolicyStore policyStore,
+ string condition,
+ AutomatedPolicyEvaluationMode policyEvaluationMode = AutomatedPolicyEvaluationMode.None,
+ string objectSet = null,
+ string policyNamePrefix = "policy_")
+ {
+ string name = SmoObjectHelpers.GenerateUniqueObjectName(policyNamePrefix);
+ TraceHelper.TraceInformation("Creating Policy definition {0} in PolicyStore {1}", name, policyStore.Name);
+
+ var policy = new Policy(policyStore, name)
+ {
+ Condition = condition,
+ AutomatedPolicyEvaluationMode = policyEvaluationMode,
+ ObjectSet = objectSet
+ };
+
+ return policy;
+ }
+
+ /// <summary>
+ /// Creates a DMF Policy object in this PolicyStore
+ /// </summary>
+ /// <param name="policyStore">The PolicyStore to create the Policy in</param>
+ /// <param name="condition">The name of the condition this Policy will evaluate</param>
+ /// <param name="policyEvaluationMode">The AutomatedPolicyEvaluationMode for the Policy</param>
+ /// <param name="objectSet">The name of the object set containing the object conditions this Policy will execute against</param>
+ /// <param name="policyNamePrefix">The prefix for the name of the Policy</param>
+ /// <returns>The created Policy</returns>
+ public static Policy CreatePolicy(
+ this PolicyStore policyStore,
+ string condition,
+ AutomatedPolicyEvaluationMode policyEvaluationMode = AutomatedPolicyEvaluationMode.None,
+ string objectSet = null,
+ string policyNamePrefix = "policy_")
+ {
+ var policy = policyStore.CreatePolicyDefinition(condition, policyEvaluationMode, objectSet, policyNamePrefix);
+ TraceHelper.TraceInformation("Creating Policy {0} in PolicyStore {1}", policy.Name, policyStore.Name);
+ policy.Create();
+ return policy;
+ }
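+
+ // Illustrative usage sketch (not part of this change): 'policyStore' is an existing PolicyStore; the
+ // facet name and DMF expression are placeholders and would need to match a real facet/property.
+ //
+ //   Condition condition = policyStore.CreateCondition("Database", "@AutoClose = False()");
+ //   Policy policy = policyStore.CreatePolicy(condition.Name);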
+
+ /// <summary>
+ /// Creates a local DMF ObjectSet object but does not actually create it on the server
+ /// </summary>
+ /// <param name="policyStore">The PolicyStore to create the ObjectSet in</param>
+ /// <param name="targetSetsAndLevelConditions">A mapping of the set of TargetSets to enable, along with the list of LevelConditions to set
+ /// for each TargetSet. The key is the URN Skeleton of the object we're setting LevelConditions for, with the list being a mapping of Levels
+ /// along with the Condition name to apply at that level</param>
+ /// <param name="facet">The Facet this ObjectSet is for</param>
+ /// <param name="objectSetNamePrefix">The prefix for the name of this ObjectSet</param>
+ /// <returns>The ObjectSet definition</returns>
+ public static ObjectSet CreateObjectSetDefinition(
+ this PolicyStore policyStore,
+ IDictionary<string, IList<Tuple<string, string>>> targetSetsAndLevelConditions,
+ string facet,
+ string objectSetNamePrefix = "object_set_")
+ {
+ string name = SmoObjectHelpers.GenerateUniqueObjectName(objectSetNamePrefix);
+ TraceHelper.TraceInformation("Creating ObjectSet definition {0} in PolicyStore {1}", name, policyStore.Name);
+ var objectSet = new ObjectSet(policyStore, name)
+ {
+ Facet = facet
+ };
+ if (targetSetsAndLevelConditions != null)
+ {
+ foreach (
+ KeyValuePair<string, IList<Tuple<string, string>>> targetSetAndLevelConditions in
+ targetSetsAndLevelConditions)
+ {
+ //The key is the target set ID - which is a SFC URN Skeleton of the objects we're creating a set of
+ var targetSet = objectSet.TargetSets[targetSetAndLevelConditions.Key];
+ targetSet.Enabled = true;
+ //Set all the level conditions, which tells the object set what conditions to
+ //apply at each level of the URN
+ foreach (Tuple<string, string> levelCondition in targetSetAndLevelConditions.Value)
+ {
+ targetSet.SetLevelCondition(targetSet.GetLevel(levelCondition.Item1), levelCondition.Item2);
+ }
+ }
+ }
+ return objectSet;
+ }
+
+ /// <summary>
+ /// Creates a DMF ObjectSet object in this PolicyStore
+ /// </summary>
+ /// <param name="policyStore">The PolicyStore to create the ObjectSet in</param>
+ /// <param name="targetSetsAndLevelConditions">A mapping of the set of TargetSets to enable, along with the list of LevelConditions to set
+ /// for each TargetSet. The key is the URN Skeleton of the object we're setting LevelConditions for, with the list being a mapping of Levels
+ /// along with the Condition name to apply at that level</param>
+ /// <param name="facet">The Facet this ObjectSet is for</param>
+ /// <param name="objectSetNamePrefix">The prefix for the name of this ObjectSet</param>
+ /// <returns>The created ObjectSet</returns>
+ public static ObjectSet CreateObjectSet(
+ this PolicyStore policyStore,
+ IDictionary<string, IList<Tuple<string, string>>> targetSetsAndLevelConditions,
+ string facet,
+ string objectSetNamePrefix = "object_set_")
+ {
+ var objectSet = policyStore.CreateObjectSetDefinition(targetSetsAndLevelConditions, facet, objectSetNamePrefix);
+ TraceHelper.TraceInformation("Creating ObjectSet {0} in PolicyStore {1}", objectSet.Name, policyStore.Name);
+ objectSet.Create();
+ return objectSet;
+ }
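+
+ // Illustrative usage sketch (not part of this change): enables the Database target set and applies a
+ // previously created condition at that level. The URN skeleton, level string, and 'nameCondition' are
+ // placeholders/assumptions, not values taken from this change.
+ //
+ //   var targetSets = new Dictionary<string, IList<Tuple<string, string>>>
+ //   {
+ //       { "Server/Database", new List<Tuple<string, string>> { Tuple.Create("Server/Database", nameCondition.Name) } }
+ //   };
+ //   ObjectSet objectSet = policyStore.CreateObjectSet(targetSets, "Database");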
+ }
+}
\ No newline at end of file
diff --git a/src/FunctionalTest/Framework/Helpers/PropertyClasses.cs b/src/FunctionalTest/Framework/Helpers/PropertyClasses.cs
new file mode 100644
index 00000000..20c71969
--- /dev/null
+++ b/src/FunctionalTest/Framework/Helpers/PropertyClasses.cs
@@ -0,0 +1,125 @@
+// Copyright (c) Microsoft.
+// Licensed under the MIT license.
+
+using System;
+using Microsoft.SqlServer.Management.Smo;
+
+namespace Microsoft.SqlServer.Test.Manageability.Utils
+{
+ /// <summary>
+ /// Holds information about a column
+ /// </summary>
+ public class ColumnProperties
+ {
+ public ColumnProperties(string name) : this(name, DataType.Int)
+ {
+
+ }
+
+ public ColumnProperties(string name, DataType dataType)
+ {
+ Name = name;
+ Type = dataType.SqlDataType;
+ SmoDataType = dataType;
+ }
+ public readonly string Name;
+ public readonly SqlDataType Type;
+ public readonly DataType SmoDataType;
+ public bool Nullable = true;
+ public bool Identity = false;
+ public override string ToString()
+ {
+ return $"{Name}-{SmoDataType}-";
+ }
+ }
+
+ /// <summary>
+ /// Small helper class to encapsulate the various properties used to create an index
+ /// </summary>
+ public class IndexProperties
+ {
+ public IndexType IndexType = IndexType.ClusteredIndex;
+ /// <summary>
+ /// The Key Type of the index, default None
+ /// </summary>
+ public IndexKeyType KeyType = IndexKeyType.None;
+ /// <summary>
+ /// The columns to include in this index
+ /// </summary>
+ public Column[] Columns = null;
+ /// <summary>
+ /// Whether the index is clustered or not
+ /// </summary>
+ public bool IsClustered = true;
+ /// <summary>
+ /// Whether the index is unique or not
+ /// </summary>
+ public bool IsUnique = false;
+
+ /// <summary>
+ /// Whether the index is online or not
+ /// </summary>
+ public bool OnlineIndexOperation = false;
+
+ /// <summary>
+ /// The name of the index, cannot be empty.
+ /// </summary>
+ public string Name = String.Empty;
+
+ /// <summary>
+ /// Additional column names to include while creating this index.
+ /// </summary>
+ public string[] ColumnNames = null;
+
+ /// <summary>
+ /// Whether we should create the index in a resumable fashion or not.
+ /// </summary>
+ public bool Resumable = false;
+ }
+
+ /// <summary>
+ /// Class defining table properties used when creating tables through the 'DatabaseObjectHelper' class.
+ /// </summary>
+ public class TableProperties
+ {
+ #region Properties
+
+ /// <summary>
+ /// Gets or sets a property determining if the table should be created as a node table.
+ /// </summary>
+ public bool IsNode { get; set; }
+
+ /// <summary>
+ /// Gets or sets a property determining if the table should be created as an edge table.
+ /// </summary>
+ public bool IsEdge { get; set; }
+
+ #endregion
+
+ #region Methods
+
+ /// <summary>
+ /// This method applies the set properties of this object to the table object.
+ /// </summary>
+ /// <param name="table">The table to set the properties on.</param>
+ public void ApplyProperties(Table table)
+ {
+ if (table == null)
+ {
+ throw new ArgumentException("table");
+ }
+
+ if (IsEdge)
+ {
+ table.IsEdge = true;
+ }
+
+ if (IsNode)
+ {
+ table.IsNode = true;
+ }
+ }
+
+ #endregion
+ }
+}
diff --git a/src/FunctionalTest/Framework/Helpers/RetryHelper.cs b/src/FunctionalTest/Framework/Helpers/RetryHelper.cs
new file mode 100644
index 00000000..0c739adc
--- /dev/null
+++ b/src/FunctionalTest/Framework/Helpers/RetryHelper.cs
@@ -0,0 +1,77 @@
+// Copyright (c) Microsoft.
+// Licensed under the MIT license.
+
+using System;
+using System.Threading;
+
+namespace Microsoft.SqlServer.Test.Manageability.Utils.Helpers
+{
+ /// <summary>
+ /// Provides utility methods for retry logic
+ /// </summary>
+ public class RetryHelper
+ {
+ /// <summary>
+ /// Will retry calling a method a number of times with a delay between each one if
+ /// an exception is thrown while calling the specified method.
+ /// </summary>
+ /// <param name="method">Method to invoke</param>
+ /// <param name="retries">Number of times to retry calling the method</param>
+ /// <param name="retryDelayMs">Delay between retry attempts in milliseconds</param>
+ /// <param name="retryMessage">Base message to display in logs for each failure attempt</param>
+ public static void RetryWhenExceptionThrown(Action method, int retries = 3, int retryDelayMs = 1000, string retryMessage = "")
+ {
+ if (string.IsNullOrWhiteSpace(retryMessage))
+ {
+ retryMessage = string.Format("Method call {0} failed.", method.Method.Name);
+ }
+ do
+ {
+ try
+ {
+ method();
+ break;
+ }
+ catch (Exception e)
+ {
+ if (retries-- > 0)
+ {
+ TraceHelper.TraceInformation("{0} ({1} tries left) - {2}", retryMessage, retries, e.Message);
+ Thread.Sleep(retryDelayMs);
+ }
+ else
+ {
+ throw;
+ }
+ }
+ } while (true);
+ }
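+
+ // Illustrative usage sketch (not part of this change): retries a flaky refresh a few times before
+ // giving up; the action body and message are placeholders.
+ //
+ //   RetryHelper.RetryWhenExceptionThrown(() => database.Tables.Refresh(), retries: 5, retryDelayMs: 2000,
+ //       retryMessage: "Refreshing tables failed");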
+
+ /// <summary>
+ /// Will retry calling a method a number of times with a delay between each one if
+ /// an exception is thrown while calling the specified method. This will return without
+ /// throwing regardless of whether the method ever succeeds.
+ /// </summary>
+ /// <param name="method">Method to invoke</param>
+ /// <param name="retries">Number of times to retry calling the method</param>
+ /// <param name="retryDelayMs">Delay between retry attempts in milliseconds</param>
+ /// <param name="retryMessage">Base message to display in logs for each failure attempt</param>
+ /// <returns>TRUE if the method executed successfully (possibly with retries). FALSE if all retry
+ /// attempts failed</returns>
+ public static bool RetryWhenExceptionThrownNoFail(Action method, int retries = 3, int retryDelayMs = 1000,
+ string retryMessage = "")
+ {
+ try
+ {
+ RetryHelper.RetryWhenExceptionThrown(method, retries, retryDelayMs, retryMessage);
+ return true;
+ }
+ catch (Exception e)
+ {
+ TraceHelper.TraceInformation("Caught exception calling retry method but purposely ignoring - {0}", e.Message);
+ //Don't let exceptions bubble up for this
+ return false;
+ }
+ }
+ }
+}
diff --git a/src/FunctionalTest/Framework/Helpers/ScriptHelpers.cs b/src/FunctionalTest/Framework/Helpers/ScriptHelpers.cs
new file mode 100644
index 00000000..d3149ae1
--- /dev/null
+++ b/src/FunctionalTest/Framework/Helpers/ScriptHelpers.cs
@@ -0,0 +1,74 @@
+// Copyright (c) Microsoft.
+// Licensed under the MIT license.
+
+using System.IO;
+using System.Linq;
+using System.Reflection;
+using Microsoft.SqlServer.Management.Smo;
+using Microsoft.SqlServer.Test.Manageability.Utils.Helpers;
+
+namespace Microsoft.SqlServer.Test.Manageability.Utils
+{
+ /// <summary>
+ /// Set of helper methods for running SQL scripts which contain tokenized values
+ /// </summary>
+ public class ScriptHelpers
+ {
+ /// <summary>
+ /// Constructs a new ScriptHelpers instance
+ /// </summary>
+ /// <param name="azureKeyVaultHelper">The helper that fetches AKV secrets identified in the tokenized script. Can be null if the scripts have no secrets.</param>
+ public ScriptHelpers(AzureKeyVaultHelper azureKeyVaultHelper = null)
+ {
+ this.AzureKeyVaultHelper = azureKeyVaultHelper;
+ }
+
+ private AzureKeyVaultHelper AzureKeyVaultHelper { get; }
+
+ /// <summary>
+ /// Loads an embedded script resource and runs it against the specified database
+ /// </summary>
+ /// <param name="scriptName">The manifest name of the embedded script resource</param>
+ /// <param name="database">The database to run the script against</param>
+ /// <param name="asm">The assembly to load the script resources from. Defaults to the Utils assembly if null</param>
+ public void LoadAndRunScriptResource(string scriptName, Database database, Assembly asm = null)
+ {
+ TraceHelper.TraceInformation("Running script '{0}' against database {1}", scriptName, database.Name);
+
+ asm = asm ?? Assembly.GetExecutingAssembly();
+
+ string[] resourceNames = asm.GetManifestResourceNames();
+ Stream scriptStream = null;
+ if (resourceNames != null)
+ {
+ string resourceName = resourceNames.Where(x => System.StringComparer.Ordinal.Equals(x, scriptName)).FirstOrDefault();
+
+ if (resourceName != null)
+ {
+ scriptStream = asm.GetManifestResourceStream(resourceName);
+ }
+ }
+
+ if (scriptStream == null)
+ {
+ throw new FailedOperationException(string.Format("Failed to load script resource {0}", scriptName));
+ }
+
+ using (var reader = new StreamReader(scriptStream))
+ {
+ string script = ScriptTokenizer.UntokenizeString(reader.ReadToEnd(), database, AzureKeyVaultHelper);
+ try
+ {
+ database.ExecuteNonQuery(script.FixNewLines());
+ }
+ catch (FailedOperationException se)
+ {
+ //Throw a new exception here since FailedOperationExceptions have a couple nested exceptions, so to
+ //avoid having to iterate through them ourselves and append the messages we let the test framework
+ //handle that
+ throw new FailedOperationException(string.Format("Failed to execute script {0}", scriptName), se);
+ }
+ }
+ }
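+
+ // Illustrative usage sketch (not part of this change): the resource name is a placeholder and must
+ // match the embedded resource's manifest name exactly; 'db' is an existing Database.
+ //
+ //   var scriptHelpers = new ScriptHelpers();
+ //   scriptHelpers.LoadAndRunScriptResource("Microsoft.SqlServer.Test.Manageability.Utils.Scripts.Setup.sql", db);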
+ }
+}
diff --git a/src/FunctionalTest/Framework/Helpers/ScriptSchemaObjectBaseHelpers.cs b/src/FunctionalTest/Framework/Helpers/ScriptSchemaObjectBaseHelpers.cs
new file mode 100644
index 00000000..d838f330
--- /dev/null
+++ b/src/FunctionalTest/Framework/Helpers/ScriptSchemaObjectBaseHelpers.cs
@@ -0,0 +1,25 @@
+// Copyright (c) Microsoft.
+// Licensed under the MIT license.
+
+using System.Globalization;
+using Microsoft.SqlServer.Management.Smo;
+
+namespace Microsoft.SqlServer.Test.Manageability.Utils
+{
+ /// <summary>
+ /// Provides helper functionality for SMO ScriptSchemaObjectBase objects.
+ /// </summary>
+ public static class ScriptSchemaObjectBaseHelpers
+ {
+ /// <summary>
+ /// Returns the schema-qualified name of the object without brackets. Use
+ /// FullQualifiedName to get the schema-qualified name with brackets.
+ /// </summary>
+ /// <param name="obj">The object whose name to return</param>
+ /// <returns>The schema-qualified name without brackets</returns>
+ public static string GetSchemaQualifiedNameNoBrackets(this ScriptSchemaObjectBase obj)
+ {
+ return string.Format("{0}.{1}", obj.Schema, obj.Name, CultureInfo.InvariantCulture);
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/FunctionalTest/Framework/Helpers/ScriptTokenizer.cs b/src/FunctionalTest/Framework/Helpers/ScriptTokenizer.cs
new file mode 100644
index 00000000..ca328758
--- /dev/null
+++ b/src/FunctionalTest/Framework/Helpers/ScriptTokenizer.cs
@@ -0,0 +1,451 @@
+// Copyright (c) Microsoft.
+// Licensed under the MIT license.
+
+using System;
+using System.Text.RegularExpressions;
+using Microsoft.SqlServer.Management.Common;
+using Microsoft.SqlServer.Management.Smo;
+using Microsoft.SqlServer.Test.Manageability.Utils.Helpers;
+
+namespace Microsoft.SqlServer.Test.Manageability.Utils
+{
+ public class ScriptTokenizer
+ {
+
+ /// <summary>
+ /// Regex to find instances of passwords in connection strings, which we'll then replace with
+ /// the $(ConnStringPassword) token so we don't have passwords in the checked in files
+ ///
+ /// Note the check is a bit simplistic and doesn't cover all the edge cases - but until we come across
+ /// an edge case just leaving it like this for time's sake
+ /// </summary>
+ private static readonly Regex ConnStringPasswordRegex = new Regex("password=.*;", RegexOptions.IgnoreCase | RegexOptions.Compiled);
+ //Regex to match the SecretStore tokens. A SecretStore token is in the form
+ //$(SecretStore:<SecretName>)
+ //Where <SecretName> is the secret name, minus the common prefix
+ //So a token like this $(SecretStore:SmoBaselineVerification_WasbKey/Secret) would
+ //retrieve the value for a secret with the full name of SSMS_TEST_SECRET_PREFIX + SmoBaselineVerification_WasbKey/Secret
+ private static readonly Regex SecretStoreRegex = new Regex(@"(\$\(SecretStore:(?<SecretName>.*?)\))", RegexOptions.IgnoreCase | RegexOptions.Compiled);
+
+ /// <summary>
+ /// Matches cluster domain names, which are in the form of
+ /// $(ServerName) OR $(SingleQuoteEscapedServerName) OR $(BracketEscapedServerName)
+ /// .
+ /// Any number of non-whitespace characters (non-greedy)
+ /// .com OR .net
+ ///
+ /// This is meant to be run after the server name has been replaced since on Azure servers the server TrueName does not include
+ /// the domain - so we want to keep that part separate.
+ /// </summary>
+ private static readonly Regex ClusterDomainNameRegex = new Regex(@"\$\((BracketEscapedServerName|SingleQuoteEscapedServerName|ServerName)\)\.\S*?\.(com|net)", RegexOptions.IgnoreCase | RegexOptions.Compiled);
+
+ /// <summary>
+ /// This matches multi-line T-SQL comments which start with /* and end with */
+ /// </summary>
+ private static readonly Regex MultiLineCommentsRegex = new Regex(@"\/\*.*\*\/", RegexOptions.Compiled | RegexOptions.Singleline);
+
+ private const string TOKEN_DatabaseName = "$(DatabaseName)";
+ private const string TOKEN_SingleQuoteEscapedDatabaseName = "$(SingleQuoteEscapedDatabaseName)";
+ private const string TOKEN_BracketEscapedDatabaseName = "$(BracketEscapedDatabaseName)";
+ private const string TOKEN_QuoteAndBracketEscapedDatabaseName = "$(QuoteAndBracketEscapedDatabaseName)";
+ private const string TOKEN_ServerName = "$(ServerName)";
+ private const string TOKEN_ServerInternalName = "$(ServerInternalName)";
+ private const string TOKEN_SingleQuoteEscapedServerName = "$(SingleQuoteEscapedServerName)";
+ private const string TOKEN_BracketEscapedServerName = "$(BracketEscapedServerName)";
+ private const string TOKEN_BracketEscapedServerInternalName = "$(BracketEscapedServerInternalName)";
+ private const string TOKEN_DefaultDataPath = "$(DefaultDataPath)";
+ private const string TOKEN_TSqlPassword = "$(TSqlPassword)";
+ private const string TOKEN_RandomTSqlPassword = "$(RandomTSqlPassword)";
+ private const string TOKEN_RandomTSqlSecret = "$(RandomTSqlSecret)";
+ private const string TOKEN_ConnStringPassword = "$(ConnStringPassword)";
+ private const string TOKEN_ScriptDate = "$(ScriptDate)";
+ private const string TOKEN_ScriptStatsStream = "$(StatsStream)";
+ private const string TOKEN_ClusterDomainName = "$(ClusterDomainName)";
+ private const string TOKEN_RandomGuid = "$(RandomGuid)";
+ //The svr.Version string (usually Major.Minor.Build, no revision)
+ private const string TOKEN_ServerVersion = "$(ServerVersion)";
+ //The full Major.Minor.Build.Revision version string
+ private const string TOKEN_ServerVersionString = "$(ServerVersionString)";
+ //The SERVERPROPERTY('ProductVersion') from the Database-specific connection
+ private const string TOKEN_DatabaseProductVersion = "$(DatabaseProductVersion)";
+ // The name of the computer hosting the instance
+ private const string TOKEN_ComputerName = "$(ComputerName)";
+ // The data source specified in the connection string. Must be tokenized after server name
+ public const string TOKEN_DataSource = "$(DataSource)";
+ // Unnamed Clustered Index names are random. Must be tokenized.
+ public const string TOKEN_ClusteredIndexName = "$(ClusteredIndexName)";
+ // Edge constraint name
+ public const string TOKEN_EdgeConstraintName = "$(EdgeConstraintName)";
+ // Sql MI file paths (uri) must be tokenized
+ private const string TOKEN_MasterDBPath = "$(MasterDBPath)";
+ private const string TOKEN_MasterDBLogPath = "$(MasterDBLogPath)";
+ private const string TOKEN_PrimaryFilePath = "$(PrimaryFilePath)";
+ private const string TOKEN_LogFileName = "$(LogFileName)";
+
+ // The default backup directory.
+ // Example of values:
+ // - E:\SQLDIRS\IN\MSSQL15.MSSQLSERVER\MSSQL\Backup
+ // - C:\Program Files\Microsoft SQL Server\MSSQL10.MSSQLSERVER\MSSQL\Backup
+ private const string TOKEN_BackupDirectory = "$(BackupDirectory)";
+ private const string TOKEN_SingleQuoteEscapedBackupDirectory = "$(SingleQuoteEscapedBackupDirectory)";
+
+ // The default error log directory/path.
+ // Example of values:
+ // - E:\SQLDIRS\IN\MSSQL15.MSSQLSERVER\MSSQL\Log
+ // - C:\Program Files\Microsoft SQL Server\MSSQL11.MSSQLSERVER\MSSQL\Log
+ private const string TOKEN_ErrorLogPath = "$(ErrorLogPath)";
+ private const string TOKEN_SingleQuoteEscapedErrorLogPath = "$(SingleQuoteEscapedErrorLogPath)";
+
+ /// <summary>
+ /// Replaces certain strings in the specified string with tokens for generic comparison.
+ ///
+ /// Current replacements :
+ /// Database Name -> $(DatabaseName)
+ /// Database Name w/ ]'s escaped -> $(BracketEscapedDatabaseName)
+ /// Database Name w/ ' escaped -> $(SingleQuoteEscapedDatabaseName)
+ /// Server Name -> $(ServerName)
+ /// Server Name w/ ]'s escaped -> $(BracketEscapedServerName)
+ /// Server Name w/ ' escaped -> $(SingleQuoteEscapedServerName)
+ /// Cluster Domain Name -> $(ClusterDomainName) (for Azure servers)
+ /// Default Data Path (Path to data files) -> $(DefaultDataPath)
+ /// T-SQL Password (PASSWORD=N'*****') -> $(TSqlPassword)
+ /// </summary>
+ /// <param name="str">The string to tokenize</param>
+ /// <param name="database">The database whose name, server, and paths are replaced with tokens</param>
+ /// <returns>The tokenized string</returns>
+ /// <remarks>Note that the password is NOT untokenized since there's no way to know what the new password is.
+ /// Instead we'll just leave the token in so that the comparison will be true.</remarks>
+ public static string TokenizeString(string str, Database database)
+ {
+ Management.Smo.Server svr = database.Parent;
+ //Valid URN server name recognized by SMO should be the server true name. For On-premises server, it equals to
+ //the real server name. For Azure, it should be the part before the first dot in its full DNS name, for example:
+ //"" in ".database.windows.net"
+ string trueServerName = svr.ConnectionContext.TrueName;
+ string internalServerName = svr.InternalName.Substring(1, svr.InternalName.Length - 2); // svr.InternalName is enclosed in square brackets, so we remove them
+
+ //Regex for matching a quoted-and-escaped database/server name
+ // First group is the open quote character (' or [) followed by any number of non-closing quote characters (' or ])
+ // Next is the closing quote escaped database name
+ // And the final group is any number of non-closing quote characters followed by the closing quote char
+ //This is so we can correctly identify and replace quoted database/server names and replace them with the
+ //appropriate generic token but this is complicated by there potentially being other characters between the quotes
+ var singleQuoteQuotedDatabaseNameRegex = new Regex(@"('[^']*)" + Regex.Escape(database.Name).Replace("'", "''") + @"([^']*)", RegexOptions.IgnoreCase);
+ var bracketQuotedDatabaseNameRegex = new Regex(@"(\[[^\]]*)" + Regex.Escape(database.Name).Replace("]", "]]") + @"([[^\]]*)", RegexOptions.IgnoreCase);
+ var singleQuoteQuotedServerNameRegex = new Regex(@"('[^']*)" + Regex.Escape(trueServerName).Replace("'", "''") + @"([^']*)", RegexOptions.IgnoreCase);
+ var bracketQuotedServerNameRegex = new Regex(@"(\[[^\]]*)" + Regex.Escape(trueServerName).Replace("]", "]]") + @"([[^\]]*)", RegexOptions.IgnoreCase);
+ var bracketQuotedServerNameInternalRegex = new Regex(@"(\[[^\]]*)" + Regex.Escape(internalServerName) + @"([[^\]]*)", RegexOptions.IgnoreCase);
+ var clusteredIndexNameRegex = new Regex(@"(ClusteredIndex_[a-z0-9]{32})");
+ var edgeConstraintNameRegex = new Regex(@"(EC__.*__[A-Z0-9a-z]*)");
+
+
+ var scriptDateRegex = new Regex(@"(Script Date:.* [P|A]M)", RegexOptions.IgnoreCase);
+ var scriptStatsStreamRegex = new Regex(@"(STATS_STREAM = (?:0[xX])?[0-9a-fA-F]+)", RegexOptions.IgnoreCase);
+ //The string used to do the replacement, $1 and $2 are the 2 groups which consist of all the matched characters that are not
+ //the actual DB name (so the prefix and postfix)
+ const string replacementString = "$1{0}$2";
+
+ str = singleQuoteQuotedDatabaseNameRegex.Replace(str, replacementString.FormatStr(TOKEN_SingleQuoteEscapedDatabaseName));
+ str = bracketQuotedDatabaseNameRegex.Replace(str, replacementString.FormatStr(TOKEN_BracketEscapedDatabaseName));
+ //There might be instances of the database name that aren't quoted, those can just be string replaced
+ str = str.Replace(database.Name, TOKEN_DatabaseName, StringComparison.CurrentCultureIgnoreCase);
+ str = singleQuoteQuotedServerNameRegex.Replace(str, replacementString.FormatStr(TOKEN_SingleQuoteEscapedServerName));
+ str = bracketQuotedServerNameRegex.Replace(str, replacementString.FormatStr(TOKEN_BracketEscapedServerName));
+ str = bracketQuotedServerNameInternalRegex.Replace(str, replacementString.FormatStr(TOKEN_BracketEscapedServerInternalName));
+ str = clusteredIndexNameRegex.Replace(str, TOKEN_ClusteredIndexName);
+ str = edgeConstraintNameRegex.Replace(str, TOKEN_EdgeConstraintName);
+
+ if (trueServerName.Length >= internalServerName.Length)
+ {
+ //There might be instances of the server name that aren't quoted, those can just be string replaced
+ str = str.Replace(trueServerName, TOKEN_ServerName, StringComparison.CurrentCultureIgnoreCase);
+ //There might be instances of the server name (the internal name, which could be different from the trueServerName e.g. in case of a CNAME) that aren't quoted, those can just be string replaced
+ str = str.Replace(internalServerName, TOKEN_ServerInternalName, StringComparison.CurrentCultureIgnoreCase);
+ }
+ else
+ {
+ //There might be instances of the server name (the internal name, which could be different from the trueServerName e.g. in case of a CNAME) that aren't quoted, those can just be string replaced
+ str = str.Replace(internalServerName, TOKEN_ServerInternalName, StringComparison.CurrentCultureIgnoreCase);
+ //There might be instances of the server name that aren't quoted, those can just be string replaced
+ str = str.Replace(trueServerName, TOKEN_ServerName, StringComparison.CurrentCultureIgnoreCase);
+ }
+
+ str = str.Replace(svr.VersionString, TOKEN_ServerVersionString);
+ str = str.Replace(svr.Version.ToString(), TOKEN_ServerVersion);
+ str = str.Replace(database.ExecutionManager.ConnectionContext.ProductVersion.ToString(), TOKEN_DatabaseProductVersion);
+ //For Azure servers, their full name has two parts: true server name + domain name suffix. Let's tokenize
+ //the domain name suffix as well
+
+ //If the server name has a . in it then it is domain-qualified and as such the
+ //true name won't include that part. We'll tokenize the domain separately.
+ int firstDot = svr.Name.IndexOf('.');
+ if (firstDot >= 0)
+ {
+ str = ClusterDomainNameRegex.Replace(str, "$($1)" + TOKEN_ClusterDomainName);
+ }
+
+ str = scriptDateRegex.Replace(str, TOKEN_ScriptDate);
+ str = scriptStatsStreamRegex.Replace(str, TOKEN_ScriptStatsStream);
+
+ //Some scripts generate a randomly generated password - since that makes comparison difficult let's try to
+ //find it and replace it with a token instead.
+ int passwordIndexStart = str.IndexOf("PASSWORD=N'", StringComparison.OrdinalIgnoreCase);
+ if (passwordIndexStart >= 0)
+ {
+ passwordIndexStart += 11;
+ int curIndex = passwordIndexStart;
+ while (curIndex < str.Length)
+ {
+ if (str[curIndex] == '\'')
+ {
+ //Skip escaped '
+ if (curIndex + 1 < str.Length && str[curIndex + 1] == '\'')
+ {
+ curIndex += 2;
+ continue;
+ }
+
+ //Replace password with token
+ str = str.Substring(0, passwordIndexStart) + TOKEN_TSqlPassword + str.Substring(curIndex);
+ break;
+ }
+ curIndex++;
+ }
+ }
+
+ if (svr.DatabaseEngineEdition == DatabaseEngineEdition.SqlManagedInstance)
+ {
+
+ // 'dynamic' tokens to match different filegroups on sql MI (names are GUIDs)
+ foreach (FileGroup fileGroup in database.FileGroups)
+ {
+ foreach (DataFile dataFile in fileGroup.Files)
+ {
+ str = str.Replace(dataFile.FileName, $"$({ dataFile.Name }_FileName)");
+ }
+ }
+ // PrimaryFilePath must be probed last because it's contained in almost all of the other paths (except for backup dir)
+ str = str
+ .Replace(svr.MasterDBPath, TOKEN_MasterDBPath)
+ .Replace(svr.MasterDBLogPath, TOKEN_MasterDBLogPath)
+ .Replace(svr.BackupDirectory, TOKEN_BackupDirectory)
+ .Replace(database.LogFiles[0].FileName, TOKEN_LogFileName)
+ .Replace(database.PrimaryFilePath, TOKEN_PrimaryFilePath);
+ }
+
+ //Azure doesn't expose the data file paths
+ if (svr.ServerType != DatabaseEngineType.SqlAzureDatabase)
+ {
+ var defaultDataPath = string.IsNullOrEmpty(svr.Settings.DefaultFile) ? svr.MasterDBPath : svr.Settings.DefaultFile;
+ //MasterDbPath doesn't have trailing slash so add it if it doesn't exist
+ defaultDataPath += defaultDataPath.EndsWith(@"\") ? "" : @"\";
+
+ //Paths are always quoted so need to escape the apostrophe
+ defaultDataPath = defaultDataPath.Replace("'", "''");
+ str = str.Replace(defaultDataPath, TOKEN_DefaultDataPath, StringComparison.CurrentCultureIgnoreCase);
+ // If the servername has an instance name, just grab the computer name part
+ if (trueServerName.Contains(@"\"))
+ {
+ var nameComponents = trueServerName.Split('\\');
+ str = str.Replace(nameComponents[0], TOKEN_ComputerName, StringComparison.CurrentCultureIgnoreCase);
+ }
+
+ // Take care of the Backup and Error Log directories...
+ var backupDirectory = string.IsNullOrEmpty(svr.Settings.BackupDirectory) ? svr.BackupDirectory : svr.Settings.BackupDirectory;
+ var escapedBackupDirectory = backupDirectory.Replace("'", "''");
+ var errorLogPath = svr.ErrorLogPath;
+ var escapedErrorLogPath = errorLogPath.Replace("'", "''");
+
+ // Order matters. in the sense that, most likely, these two strings are identical (unless one
+ // day we decide to put the SQL files under a folder whose name contains single-quote characters).
+ // So, first we do the replacement looking for the unescaped string, then we do the replacement
+ // using the escaped strings: if the two strings are identical, we prefer the TOKEN_BackupDirectory
+ // over the TOKEN_SingleQuoteEscapedBackupDirectory.
+ str = str.Replace(backupDirectory, TOKEN_BackupDirectory, StringComparison.CurrentCultureIgnoreCase);
+ str = str.Replace(escapedBackupDirectory, TOKEN_SingleQuoteEscapedBackupDirectory, StringComparison.CurrentCultureIgnoreCase);
+
+ // Same log used above applies to Error Log Path as well...
+ if (!string.IsNullOrEmpty(errorLogPath))
+ {
+ str = str.Replace(errorLogPath, TOKEN_ErrorLogPath, StringComparison.CurrentCultureIgnoreCase);
+ str = str.Replace(escapedErrorLogPath, TOKEN_SingleQuoteEscapedErrorLogPath, StringComparison.CurrentCultureIgnoreCase);
+ }
+ }
+
+ //A workaround needed to keep testing Database.baseline.xml for SqlOnDemand
+ //In the second line of scripting parameters for all targets, the SIZE randomly reports either 8192KB or 73728KB
+ //Setting either of those two as the expected baseline will cause the test to fail some of the time
+ if (svr.DatabaseEngineEdition == DatabaseEngineEdition.SqlOnDemand && str.Contains("CREATE DATABASE"))
+ {
+ str = Regex.Replace(str, "73728KB", "8192KB");
+ }
+ //Tokenize the stripped values as well
+ str = TokenizeStrippedValues(str);
+
+ return str;
+ }
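+
+ // Illustrative usage sketch (not part of this change): tokenizing a generated script makes it stable
+ // across servers and databases so it can be compared against a checked-in baseline; the variable
+ // names here are placeholders.
+ //
+ //   string tokenized = ScriptTokenizer.TokenizeString(generatedCreateScript, db);
+ //   // ...compare 'tokenized' to the baseline; the reverse mapping is UntokenizeString(baseline, db, keyVaultHelper)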
+
+ /// <summary>
+ /// Strips out certain values from the given string, replacing them with a token. This is
+ /// intended for values which once stripped can't be Untokenized since the values are
+ /// random (or otherwise unknown) when comparing the baseline at a later time.
+ ///
+ /// Connection String Password (password=...;) -> $(ConnStringPassword)
+ /// </summary>
+ /// <param name="str">The string to strip</param>
+ /// <returns>The stripped string</returns>
+ public static string TokenizeStrippedValues(string str)
+ {
+ //We can't have passwords in the files so strip the ones in the connection strings out. We currently
+ //don't need the actual password values when running the tests anyways as just verifying that it
+ //contains a password segment is enough for our tests
+ str = ConnStringPasswordRegex.Replace(str, TOKEN_ConnStringPassword);
+
+ return str;
+ }
+
+ /// <summary>
+ /// Replaces certain token strings in the specified string with their actual values.
+ ///
+ /// Current replacements :
+ /// $(DatabaseName) -> Database Name
+ /// $(BracketEscapedDatabaseName) -> Database Name w/ ]'s escaped
+ /// $(SingleQuoteEscapedDatabaseName) -> Database Name w/ ' escaped
+ /// $(ServerName) -> Server Name
+ /// $(BracketEscapedServerName) -> Server Name w/ ]'s escaped
+ /// $(SingleQuoteEscapedServerName) -> Server Name w/ ' escaped
+ /// For Azure servers, $(ClusterDomainName) -> Server Domain Name
+ /// $(DefaultDataPath) -> Default Data Path (Path to data files)
+ /// $(RandomGuid) -> Random GUID string
+ /// $(RandomTSqlPassword) -> Password='[Random Guid String]'
+ /// </summary>
+ /// <param name="str">The tokenized string</param>
+ /// <param name="database">The database whose values replace the tokens</param>
+ /// <param name="azureKeyVaultHelper">The helper used to resolve $(SecretStore:...) tokens; required only if the string contains them</param>
+ /// <returns>The untokenized string</returns>
+ public static string UntokenizeString(string str, Database database, AzureKeyVaultHelper azureKeyVaultHelper)
+ {
+ Management.Smo.Server svr = database.Parent;
+ //Valid URN server name recognized by SMO should be the server true name. For On-premises servers, it equals
+ //the real server name. For Azure, it should be the part before the first dot in its full DNS name, for example:
+ //"<servername>" in "<servername>.database.windows.net".
+ string trueServerName = svr.ConnectionContext.TrueName;
+ string internalServerName = svr.InternalName.Substring(1, svr.InternalName.Length - 2); // svr.InternalName is enclosed in square brackets, so we remove them
+ // If the connection string used "." or "localhost" it will not get the host name from SQL. Linux has lowercase host names, Windows upper.
+ // SQL always returns upper.
+ if (internalServerName != System.Environment.MachineName)
+ {
+ internalServerName = internalServerName.ToUpperInvariant();
+ }
+
+ //Server names are replaced as upper case since they're case insensitive, so for ease of comparison we put them all as upper
+ string ret = str
+ .Replace(TOKEN_DatabaseName, database.Name, StringComparison.CurrentCultureIgnoreCase)
+ .Replace(TOKEN_BracketEscapedDatabaseName, database.Name.Replace("]", "]]"), StringComparison.CurrentCultureIgnoreCase)
+ .Replace(TOKEN_SingleQuoteEscapedDatabaseName, database.Name.Replace("'", "''"), StringComparison.CurrentCultureIgnoreCase)
+ .Replace(TOKEN_QuoteAndBracketEscapedDatabaseName, database.Name.Replace("'", "''").Replace("]", "]]"), StringComparison.CurrentCultureIgnoreCase)
+ .Replace(TOKEN_ServerName, trueServerName.ToUpper(), StringComparison.CurrentCultureIgnoreCase)
+ .Replace(TOKEN_ServerInternalName, internalServerName, StringComparison.CurrentCultureIgnoreCase)
+ .Replace(TOKEN_BracketEscapedServerName, trueServerName.ToUpper().Replace("]", "]]"), StringComparison.CurrentCultureIgnoreCase)
+ .Replace(TOKEN_BracketEscapedServerInternalName, internalServerName.Replace("]", "]]"), StringComparison.CurrentCultureIgnoreCase)
+ .Replace(TOKEN_SingleQuoteEscapedServerName, trueServerName.ToUpper().Replace("'", "''"), StringComparison.CurrentCultureIgnoreCase)
+ .Replace(TOKEN_RandomGuid, Guid.NewGuid().ToString())
+ .Replace(TOKEN_ServerVersionString, svr.VersionString)
+ .Replace(TOKEN_ServerVersion, svr.Version.ToString())
+ .Replace(TOKEN_DatabaseProductVersion, database.ExecutionManager.ConnectionContext.ProductVersion.ToString())
+ .Replace(TOKEN_RandomTSqlPassword, String.Format("PASSWORD='{0}'", Guid.NewGuid()))
+ .Replace(TOKEN_RandomTSqlSecret, String.Format("SECRET='{0}'", Guid.NewGuid()));
+
+ //If the server is domain-qualified (we'll assume by the presence of a .) then untokenize the domain name token
+ int dotIndex = svr.Name.IndexOf('.');
+ if (dotIndex >= 0)
+ {
+ string domain = svr.Name.Substring(dotIndex);
+ ret = ret.Replace(TOKEN_ClusterDomainName, domain);
+ }
+
+ // Replace SQL Mi random paths
+ if (svr.DatabaseEngineEdition == DatabaseEngineEdition.SqlManagedInstance)
+ {
+ // 'dynamic' tokens to match different filegroups on sql MI (names are GUIDs)
+ foreach (FileGroup fileGroup in database.FileGroups)
+ {
+ foreach (DataFile dataFile in fileGroup.Files)
+ {
+ ret = ret.Replace($"$({ dataFile.Name }_FileName)", dataFile.FileName);
+ }
+ }
+
+ // PrimaryFilePath must be probed last because it's contained in almost all of the other paths (except for backup dir)
+ ret = ret
+ .Replace(TOKEN_MasterDBPath, svr.MasterDBPath)
+ .Replace(TOKEN_MasterDBLogPath, svr.MasterDBLogPath)
+ .Replace(TOKEN_BackupDirectory, svr.BackupDirectory)
+ .Replace(TOKEN_LogFileName, database.LogFiles[0].FileName)
+ .Replace(TOKEN_PrimaryFilePath, database.PrimaryFilePath);
+ }
+
+ var defaultDataPath = string.Empty;
+ var backupDirectory = string.Empty;
+ var errorLogPath = string.Empty;
+
+ if (svr.ServerType != DatabaseEngineType.SqlAzureDatabase)
+ {
+ //Azure doesn't have this property, so we'll just leave it as an empty string
+ defaultDataPath = string.IsNullOrEmpty(svr.Settings.DefaultFile) ? svr.MasterDBPath : svr.Settings.DefaultFile;
+ //MasterDbPath doesn't have trailing slash so add it if it doesn't exist
+ defaultDataPath += defaultDataPath.EndsWith(@"\") ? "" : @"\";
+
+ backupDirectory = string.IsNullOrEmpty(svr.Settings.BackupDirectory) ? svr.BackupDirectory : svr.Settings.BackupDirectory;
+
+ errorLogPath = svr.ErrorLogPath;
+ }
+ ret = ret.Replace(TOKEN_DefaultDataPath, defaultDataPath, StringComparison.CurrentCultureIgnoreCase);
+ ret = ret.Replace(TOKEN_BackupDirectory, backupDirectory, StringComparison.CurrentCultureIgnoreCase);
+ ret = ret.Replace(TOKEN_SingleQuoteEscapedBackupDirectory, backupDirectory, StringComparison.CurrentCultureIgnoreCase);
+ ret = ret.Replace(TOKEN_ErrorLogPath, errorLogPath, StringComparison.CurrentCultureIgnoreCase);
+ ret = ret.Replace(TOKEN_SingleQuoteEscapedErrorLogPath, errorLogPath, StringComparison.CurrentCultureIgnoreCase);
+
+ //Replace all of the secret store tokens with their retrieved values
+ //Note we prefix the secret name with a common test prefix before retrieving it
+ foreach (Match match in SecretStoreRegex.Matches(ret))
+ {
+ if (azureKeyVaultHelper == null)
+ {
+ throw new ArgumentNullException(nameof(azureKeyVaultHelper));
+ }
+ var secretName = match.Groups["SecretName"].Value;
+ string secretValue;
+ try
+ {
+ secretValue = azureKeyVaultHelper.GetDecryptedSecret(AzureKeyVaultHelper.SSMS_TEST_SECRET_PREFIX + secretName);
+ }
+ catch
+ {
+ secretValue = azureKeyVaultHelper.GetDecryptedSecret(secretName);
+ }
+ ret = ret.Replace(match.Value, secretValue);
+ }
+
+ if (trueServerName.Contains(@"\"))
+ {
+ ret = ret.Replace(TOKEN_ComputerName, trueServerName.Split('\\')[0],
+ StringComparison.CurrentCultureIgnoreCase);
+ }
+
+ ret = ret.Replace(TOKEN_DataSource, database.ExecutionManager.ConnectionContext.ServerInstance);
+ return ret;
+ }
+
+ /// <summary>
+ /// Removes multi-line T-SQL comments (blocks that start with /* and end with */) and then
+ /// returns a trimmed version of the new string
+ /// </summary>
+ /// <param name="s">The string to strip comments from</param>
+ /// <returns>The trimmed string with multi-line comments removed</returns>
+ public static string RemoveMultiLineComments(string s)
+ {
+ return MultiLineCommentsRegex.Replace(s, String.Empty).Trim();
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/FunctionalTest/Framework/Helpers/ServerObjectHelpers.cs b/src/FunctionalTest/Framework/Helpers/ServerObjectHelpers.cs
new file mode 100644
index 00000000..f543e83d
--- /dev/null
+++ b/src/FunctionalTest/Framework/Helpers/ServerObjectHelpers.cs
@@ -0,0 +1,625 @@
+// Copyright (c) Microsoft.
+// Licensed under the MIT license.
+
+using Microsoft.SqlServer.Management.Common;
+using Microsoft.SqlServer.Management.Smo;
+using System;
+using System.Collections.Generic;
+using System.Data;
+#if MICROSOFTDATA
+using Microsoft.Data.SqlClient;
+#else
+using System.Data.SqlClient;
+#endif
+using System.Diagnostics;
+using System.Globalization;
+using System.IO;
+using System.Linq;
+using System.Threading;
+using Microsoft.SqlServer.Management.Sdk.Sfc;
+using Microsoft.SqlServer.Test.Manageability.Utils.Helpers;
+using Microsoft.SqlServer.Test.Manageability.Utils.TestFramework;
+using Microsoft.VisualStudio.TestTools.UnitTesting;
+using TraceHelper = Microsoft.SqlServer.Test.Manageability.Utils.Helpers.TraceHelper;
+using SMO = Microsoft.SqlServer.Management.Smo;
+using System.Threading.Tasks;
+
+namespace Microsoft.SqlServer.Test.Manageability.Utils
+{
+ /// <summary>
+ /// Helper methods, constants, etc. dealing with the SMO Server object
+ /// </summary>
+ public static class ServerObjectHelpers
+ {
+ static readonly Semaphore azureDbCreateLock = new Semaphore(3, 3);
+
+ /// <summary>
+ /// Restores a database from the specified backup file. It's the caller's responsibility to ensure the server
+ /// has read access for the file.
+ /// The files in the backup will be moved to the default data/log locations for the server
+ /// </summary>
+ /// <param name="server">The server to restore the database onto</param>
+ /// <param name="dbBackupFile">Path of backup file for restoring database</param>
+ /// <param name="dbName">Name of restored database</param>
+ /// <returns>Restored database</returns>
+ internal static Database RestoreDatabaseFromBackup(this SMO.Server server, string dbBackupFile, string dbName)
+ {
+ // we may be using a file already on the server with a local path
+ if (!File.Exists(dbBackupFile))
+ {
+ Trace.TraceWarning("DB Backup File '{0}' is not visible to the test", dbBackupFile);
+ }
+
+ // Get the default location where we should place the restored data files
+ string dataFilePath = String.IsNullOrEmpty(server.Settings.DefaultFile) ? server.MasterDBPath : server.Settings.DefaultFile;
+ if (String.IsNullOrWhiteSpace(dataFilePath))
+ {
+ // We failed to get the path
+ throw new InvalidOperationException("Could not get database file path for restoring from backup");
+ }
+
+ // Get the default location where we should place the restored log files
+ string logFilePath = String.IsNullOrEmpty(server.Settings.DefaultLog) ? server.MasterDBLogPath : server.Settings.DefaultLog;
+ if (String.IsNullOrWhiteSpace(logFilePath))
+ {
+ // We failed to get the path
+ throw new InvalidOperationException("Could not get database log file path for restoring from backup");
+ }
+
+ var restore = new Restore
+ {
+ Database = dbName,
+ Action = RestoreActionType.Database
+ };
+ restore.Devices.AddDevice(dbBackupFile, DeviceType.File);
+ DataTable dt = restore.ReadFileList(server);
+
+ //The files need to be moved to avoid collisions
+ int index = 0;
+ foreach (DataRow row in dt.Rows)
+ {
+ //Type == L means it's a log file so put it in the log file location, all others go to data file location
+ string filePath = "L".Equals(row["Type"] as string, StringComparison.OrdinalIgnoreCase)
+ ? logFilePath
+ : dataFilePath;
+ //Unique filename so new files don't collide either
+ string fileName = dbName + "_" + index + Path.GetExtension(row["PhysicalName"] as string);
+ restore.RelocateFiles.Add(new RelocateFile(row["LogicalName"] as string, Path.Combine(filePath, fileName)));
+ ++index;
+ }
+ TraceHelper.TraceInformation(String.Format("Restoring database '{0}' from backup file '{1}'", dbName, dbBackupFile));
+ restore.SqlRestore(server);
+
+ server.Databases.Refresh();
+ return server.Databases[dbName];
+ }
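+
+ // Illustrative usage sketch (not part of this change): the backup path and names are placeholders;
+ // the path must be readable by the SQL Server service account on the target machine.
+ //
+ //   Database restored = server.RestoreDatabaseFromBackup(@"C:\backups\MyDb.bak", "MyDbRestored");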
+
+ /// <summary>
+ /// Checks if a database with the specified name exists on the server
+ /// </summary>
+ /// <param name="server">The server to check</param>
+ /// <param name="databaseName">Name of database</param>
+ /// <returns>True if database exists, false otherwise</returns>
+ internal static bool CheckDatabaseExistence(this SMO.Server server, string databaseName)
+ {
+ // Start another SqlConnection using the same ConnectionString as the current server object,
+ // and use T-SQL script execution instead of SMO to avoid unresolved SMO issues
+ using (var conn = new SqlConnection(server.ConnectionContext.ConnectionString))
+ {
+ conn.Open();
+ using (var cmd = conn.CreateCommand())
+ {
+ cmd.CommandText = String.Format(
+ CultureInfo.InvariantCulture,
+ "SELECT count(*) FROM sys.databases WHERE name = {0}",
+ SmoObjectHelpers.SqlSingleQuoteString(databaseName));
+ int rowCount = (int)cmd.ExecuteScalar();
+ if (rowCount > 0)
+ {
+ return true;
+ };
+ }
+ }
+ return false;
+ }
+
+ /// <summary>
+ /// Returns the Drives available on the server. Windows drives will have letters like C:, while
+ /// Linux may just have the root /
+ /// </summary>
+ /// <param name="server">The server to query</param>
+ /// <returns>The names of the drives on the server</returns>
+ public static IEnumerable<string> EnumerateDrives(this SMO.Server server)
+ {
+ var req = new Request()
+ {
+ Urn = "Server/Drive",
+ Fields = new[] { "Name" }
+ };
+ return ((DataSet)(new Enumerator().Process(server.ConnectionContext, req)))
+ .Tables[0]
+ .Rows.OfType<DataRow>()
+ .Select(r => r[0] as string);
+
+ }
+
+ public const string NameColumn = "Name";
+ public const string IsFileColumn = "IsFile";
+ public const string FullNameColumn = "FullName";
+
+ /// <summary>
+ /// Returns a DataTable whose rows have Name,IsFile,FullName columns for each file or folder
+ /// in the given folder
+ /// </summary>
+ /// <param name="server">The server to query</param>
+ /// <param name="path">Directory path on the server</param>
+ /// <returns>A DataTable listing the contents of the folder</returns>
+ public static DataTable EnumerateFilesAndFolders(this SMO.Server server, string path)
+ {
+ var orderByIsfile = new OrderBy { Field = "IsFile" };
+ var orderByName = new OrderBy { Field = "Name" };
+ var req = new Request
+ {
+ Urn = "Server/File[@Path='" + Urn.EscapeString(path) + "']",
+ Fields = new[] { NameColumn, IsFileColumn, FullNameColumn },
+ OrderByList = new[] { orderByIsfile, orderByName }
+ };
+
+ DataSet ds = new Enumerator().Process(server.ConnectionContext, req);
+ return ds.Tables[0];
+ }
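+
+ // Illustrative usage sketch: listing the entries under the first drive returned by
+ // EnumerateDrives, assuming "server" is a connected SMO.Server:
+ //
+ //   string drive = server.EnumerateDrives().First();
+ //   DataTable entries = server.EnumerateFilesAndFolders(drive);
+ //   foreach (DataRow row in entries.Rows)
+ //   {
+ //       Trace.TraceInformation("{0} (IsFile={1})", row[FullNameColumn], row[IsFileColumn]);
+ //   }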
+
+ /// <summary>
+ /// Creates a database definition with the specified parameters on the specified server. This creates the local
+ /// definition only; it does not call Create() on the Database.
+ /// </summary>
+ /// <param name="server">The server to create the database definition for</param>
+ /// <param name="dbNamePrefix">The prefix to give the database name</param>
+ /// <param name="dbAzureDatabaseEdition">The Azure edition to use when creating an Azure database</param>
+ /// <returns>The Database object representing the definition</returns>
+ public static Database CreateDatabaseDefinition(
+ this SMO.Server server,
+ string dbNamePrefix = "",
+ SqlTestBase.AzureDatabaseEdition dbAzureDatabaseEdition = SqlTestBase.AzureDatabaseEdition.NotApplicable
+ )
+ {
+ Database db = null;
+ string databaseName = SmoObjectHelpers.GenerateUniqueObjectName(dbNamePrefix);
+
+ try
+ {
+ TraceHelper.TraceInformation("Creating new database '{0}' on server '{1}'", databaseName,
+ server.Name);
+ // No valid backup file location so default to creating our own
+ switch (dbAzureDatabaseEdition)
+ {
+ // We set ReadOnly to make sure we exercise code paths in Database.ScriptCreate based
+ // on the property being non-null
+ case SqlTestBase.AzureDatabaseEdition.NotApplicable:
+ {
+ db = new Database(server, databaseName) { ReadOnly = false };
+ break;
+ }
+ case SqlTestBase.AzureDatabaseEdition.DataWarehouse:
+ {
+ db = new Database(server, databaseName,
+ DatabaseEngineEdition.SqlDataWarehouse)
+ {
+ AzureEdition = dbAzureDatabaseEdition.ToString(),
+ AzureServiceObjective = "DW100c",
+ MaxSizeInBytes = 1024.0 * 1024.0 * 1024.0 * 500,
+ ReadOnly = false
+ };
+ //500GB
+ break;
+ }
+ case SqlTestBase.AzureDatabaseEdition.Hyperscale:
+ {
+ db = new Database(server, databaseName,
+ DatabaseEngineEdition.SqlDatabase)
+ {
+ AzureEdition = dbAzureDatabaseEdition.ToString(),
+ AzureServiceObjective = "HS_Gen5_2",
+ // Shake out issues that only arise in case sensitive collations
+ Collation = "SQL_Latin1_General_CP1_CS_AS",
+ CatalogCollation = CatalogCollationType.DatabaseDefault,
+ MaxSizeInBytes = 0,
+ CompatibilityLevel = CompatibilityLevel.Version160,
+ ReadOnly = false
+ };
+ break;
+ }
+ case SqlTestBase.AzureDatabaseEdition.Basic:
+ case SqlTestBase.AzureDatabaseEdition.Standard:
+ case SqlTestBase.AzureDatabaseEdition.Premium:
+ {
+ db = new Database(server, databaseName,
+ DatabaseEngineEdition.SqlDatabase)
+ {
+ AzureEdition = dbAzureDatabaseEdition.ToString(),
+ MaxSizeInBytes = 1024.0 * 1024.0 * 1024.0,
+ CompatibilityLevel = CompatibilityLevel.Version160,
+ ReadOnly = false
+ };
+ //1GB
+ break;
+ }
+ default:
+ throw new InvalidOperationException(
+ string.Format(
+ "Can't recognize Azure SQL database edition '{0}' specified for current database",
+ dbAzureDatabaseEdition));
+ }
+ }
+ catch (Exception e)
+ {
+ // Add in some more information
+ string message = string.Format(
+ "CreateDatabaseDefinition failed when targeting server {0}. Message:\n{1}\nStack Trace:\n{2}",
+ server.Name,
+ e.BuildRecursiveExceptionMessage(),
+ e.StackTrace);
+ Trace.TraceError(message);
+ throw new InternalTestFailureException(message, e);
+ }
+
+ return db;
+ }
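+
+ // Illustrative usage sketch: building a definition and scripting it without creating the
+ // database, assuming "server" is a connected SMO.Server:
+ //
+ //   Database definition = server.CreateDatabaseDefinition("scriptOnly");
+ //   string createScript = definition.Script().ToSingleString();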
+
+ /// <summary>
+ /// Creates a database with the specified parameters on the specified server.
+ /// </summary>
+ /// <param name="server">The server to create the database on</param>
+ /// <param name="dbNamePrefix">The prefix to give the database name</param>
+ /// <param name="dbAzureDatabaseEdition">The Azure edition to use when creating an Azure database</param>
+ /// <param name="dbBackupFile">If specified, the database backup file to restore the database from</param>
+ /// <returns>The Database object representing the database on the server</returns>
+ public static Database CreateDatabaseWithRetry(
+ this SMO.Server server,
+ string dbNamePrefix = "",
+ SqlTestBase.AzureDatabaseEdition dbAzureDatabaseEdition = SqlTestBase.AzureDatabaseEdition.NotApplicable,
+ string dbBackupFile = ""
+ )
+ {
+ Database db = null;
+
+ RetryHelper.RetryWhenExceptionThrown(
+ () =>
+ {
+ string databaseName = SmoObjectHelpers.GenerateUniqueObjectName(dbNamePrefix);
+ try
+ {
+
+ if (string.IsNullOrEmpty(dbBackupFile))
+ {
+ if (server.DatabaseEngineType != DatabaseEngineType.SqlAzureDatabase)
+ {
+ db = new Database(server, databaseName) { ReadOnly = false };
+ }
+ else
+ {
+
+ TraceHelper.TraceInformation("Creating new database '{0}' on server '{1}'", databaseName,
+ server.Name);
+ // No valid backup file location so default to creating our own
+ switch (dbAzureDatabaseEdition)
+ {
+ // We set ReadOnly to make sure we exercise code paths in Database.ScriptCreate based
+ // on the property being non-null
+ case SqlTestBase.AzureDatabaseEdition.NotApplicable:
+ {
+ if (server.DatabaseEngineEdition == DatabaseEngineEdition.SqlOnDemand)
+ {
+ db = new Database(server, databaseName);
+ }
+ else
+ {
+ db = new Database(server, databaseName) { ReadOnly = false, CompatibilityLevel = CompatibilityLevel.Version160 };
+ }
+ break;
+ }
+ case SqlTestBase.AzureDatabaseEdition.DataWarehouse:
+ {
+ db = new Database(server, databaseName,
+ DatabaseEngineEdition.SqlDataWarehouse)
+ {
+ AzureEdition = dbAzureDatabaseEdition.ToString(),
+ // newer regions don't support DW100c but dw1000c times out too often
+ AzureServiceObjective = "DW100c",
+ MaxSizeInBytes = 1024.0 * 1024.0 * 1024.0 * 500,
+ ReadOnly = false
+ };
+ //500GB
+ break;
+ }
+ case SqlTestBase.AzureDatabaseEdition.Hyperscale:
+ {
+ db = new Database(server, databaseName,
+ DatabaseEngineEdition.SqlDatabase)
+ {
+ AzureEdition = dbAzureDatabaseEdition.ToString(),
+ AzureServiceObjective = "HS_Gen5_2",
+ // Shake out issues that only arise in case sensitive collations
+ Collation = "SQL_Latin1_General_CP1_CS_AS",
+ CatalogCollation = CatalogCollationType.DatabaseDefault,
+ MaxSizeInBytes = 0,
+ CompatibilityLevel = CompatibilityLevel.Version160,
+ ReadOnly = false
+ };
+ break;
+ }
+ case SqlTestBase.AzureDatabaseEdition.Basic:
+ case SqlTestBase.AzureDatabaseEdition.Standard:
+ case SqlTestBase.AzureDatabaseEdition.Premium:
+ {
+ db = new Database(server, databaseName,
+ DatabaseEngineEdition.SqlDatabase)
+ {
+ AzureEdition = dbAzureDatabaseEdition.ToString(),
+ MaxSizeInBytes = 1024.0 * 1024.0 * 1024.0,
+ CompatibilityLevel = CompatibilityLevel.Version160,
+ ReadOnly = false
+ };
+ //1GB
+ break;
+ }
+ case SqlTestBase.AzureDatabaseEdition.GeneralPurpose:
+ case SqlTestBase.AzureDatabaseEdition.BusinessCritical:
+ {
+ db = new Database(server, databaseName,
+ DatabaseEngineEdition.SqlDatabase)
+ {
+ AzureEdition = dbAzureDatabaseEdition.ToString(),
+ CompatibilityLevel = CompatibilityLevel.Version160,
+ ReadOnly = false
+ };
+ break;
+ }
+ default:
+ throw new InvalidOperationException(
+ string.Format(
+ "Can't recognize Azure SQL database edition '{0}' specified for current database",
+ dbAzureDatabaseEdition));
+ }
+ }
+ // Reduce contention for Azure resources by limiting the number of simultaneous creates
+ if (server.DatabaseEngineType == DatabaseEngineType.SqlAzureDatabase)
+ {
+ try
+ {
+ azureDbCreateLock.WaitOne();
+ db.Create();
+ // Give the db a few seconds to be ready for action
+ Thread.Sleep(5000);
+ }
+ finally
+ {
+ azureDbCreateLock.Release();
+ }
+ }
+ else
+ {
+ db.Create();
+ }
+ }
+ else
+ {
+ // Restore DB from the specified backup file
+ TraceHelper.TraceInformation("Restoring database '{0}' from backup file '{1}'",
+ databaseName,
+ dbBackupFile);
+ db = server.RestoreDatabaseFromBackup(dbBackupFile, databaseName);
+ }
+ }
+ catch (Exception e)
+ {
+ // Add in some more information
+ string message = string.Format(
+ "CreateDatabaseWithRetry failed when targeting server {0}. Message:\n{1}\nStack Trace:\n{2}",
+ server.Name,
+ e.BuildRecursiveExceptionMessage(),
+ e.StackTrace);
+ Trace.TraceError(message);
+ throw new InternalTestFailureException(message, e);
+ }
+ }, retries: 3, retryDelayMs: 30000,
+ retryMessage: "Creating Initial DB failed");
+ return db;
+ }
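+
+ // Illustrative usage sketch: creating a Standard-edition Azure database with retries and
+ // cleaning up afterwards, assuming "server" is connected to an Azure SQL Database server:
+ //
+ //   Database db = server.CreateDatabaseWithRetry("pooltest", SqlTestBase.AzureDatabaseEdition.Standard);
+ //   try { /* run test against db */ }
+ //   finally { server.DropKillDatabaseNoThrow(db.Name); }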
+
+ /// <summary>
+ /// Creates a snapshot of the specified DB
+ /// </summary>
+ /// <param name="server">The server hosting the database</param>
+ /// <param name="db">The database to snapshot</param>
+ /// <returns>The Database object representing the snapshot</returns>
+ public static Database CreateDbSnapshotWithRetry(this SMO.Server server, Database db)
+ {
+ Database dbSnapshot = null;
+ RetryHelper.RetryWhenExceptionThrown(
+ () =>
+ {
+ string databaseSnapshotName = db.Name + "_ss";
+ TraceHelper.TraceInformation("Creating database snapshot '{0}' on server '{1}'",
+ databaseSnapshotName,
+ server.Name);
+ try
+ {
+ dbSnapshot = new Database(server, databaseSnapshotName)
+ {
+ DatabaseSnapshotBaseName = db.Name
+ };
+
+ foreach (FileGroup fg in db.FileGroups)
+ {
+ dbSnapshot.FileGroups.Add(new FileGroup(dbSnapshot, fg.Name));
+ foreach (DataFile df in fg.Files)
+ {
+ dbSnapshot.FileGroups[fg.Name].Files.Add(
+ new DataFile(dbSnapshot.FileGroups[fg.Name], df.Name,
+ Path.Combine(db.PrimaryFilePath, df.Name + Guid.NewGuid() + ".ss")));
+ }
+
+ }
+
+ dbSnapshot.Create();
+ }
+ catch (Exception e)
+ {
+ // Add in some more information
+ string message = string.Format(
+ "CreateDbSnapshotWithRetry failed when targeting server {0}. Message:\n{1}\nStack Trace:\n{2}",
+ server.Name,
+ e.BuildRecursiveExceptionMessage(),
+ e.StackTrace);
+ Trace.TraceError(message);
+ throw new InternalTestFailureException(message, e);
+ }
+ }, retries: 3, retryDelayMs: 30000,
+ retryMessage: "Creating Snapshot of initial DB failed");
+
+ return dbSnapshot;
+ }
+
+ /// <summary>
+ /// Attempts to drop the specified database, but does not throw if an error occurs. Will kill
+ /// all connections to the DB for on-prem servers first (this functionality is not supported on Azure)
+ /// </summary>
+ /// <param name="server">The server containing the DB we want to drop</param>
+ /// <param name="dbName">The name of the DB to drop</param>
+ /// <returns>TRUE if the DB was successfully dropped, FALSE otherwise (either an error occurred or it didn't exist)</returns>
+ public static bool DropKillDatabaseNoThrow(this SMO.Server server, string dbName)
+ {
+ bool dbDropped = false;
+ try
+ {
+ bool dbExists = server.CheckDatabaseExistence(dbName);
+ if (dbExists)
+ {
+ if (server.DatabaseEngineType == DatabaseEngineType.SqlAzureDatabase)
+ {
+ TraceHelper.TraceInformation("Dropping database [{0}] on server [{1}]", dbName, server.Name);
+ // Calling Database.Drop doesn't work for Azure SQL DW so drop at the server level, which in our case is always master
+ server.ExecutionManager.ExecuteNonQuery($"DROP DATABASE {dbName.SqlBracketQuoteString()}");
+ }
+ else
+ {
+ TraceHelper.TraceInformation("Dropping database [{0}] and closing all open connections on server [{1}]",
+ dbName,
+ server.Name);
+ // DROP DATABASE can take 5 minutes on MI, so delegate it to a background worker and ignore any errors.
+ if (server.DatabaseEngineEdition == DatabaseEngineEdition.SqlManagedInstance)
+ {
+ var newServer = new SMO.Server(server.ConnectionContext.Copy());
+ Task.Run(() => newServer.KillDatabase(dbName));
+ }
+ else
+ {
+ server.KillDatabase(dbName);
+ }
+ }
+ dbDropped = true;
+ }
+ }
+ catch (Exception e)
+ {
+ // Log this but don't re-throw since we won't consider this a test failure
+ Trace.TraceWarning(
+ "Got exception trying to drop test db [{0}] on server [{1}]\nMessage: {2}\nStack Trace:\n{3}",
+ dbName,
+ server.Name,
+ e.BuildRecursiveExceptionMessage(),
+ e.StackTrace);
+ }
+ return dbDropped;
+ }
+
+ ///
+ /// Returns the Instance-qualified net name of the specified server
+ ///
+ ///
+ ///
+ public static string NetNameWithInstance(this SMO.Server server)
+ {
+ return !server.IsSupportedProperty("NetName") ?
+ server.Name :
+ server.NetName +
+ (string.IsNullOrWhiteSpace(server.InstanceName) ? string.Empty : @"\" + server.InstanceName);
+ }
+
+ /// <summary>
+ /// Creates a database and takes a full backup
+ /// </summary>
+ /// <param name="server">The target server</param>
+ /// <returns>The newly created database</returns>
+ public static Database CreateDatabaseAndTakeFullBackup(this SMO.Server server)
+ {
+ // HADR cannot handle the database with special characters in the name
+ // defect 11008593 is tracking this issue
+ SMO.Database db = server.CreateDatabaseWithRetry();
+ db.TakeFullBackup();
+ return db;
+ }
+
+ /// <summary>
+ /// Creates a new Login on this server
+ /// </summary>
+ /// <param name="server">The server to create the login for</param>
+ /// <param name="name">The name to give the login</param>
+ /// <param name="loginType">The type of login</param>
+ /// <param name="password">Optional - the password to give the login</param>
+ /// <param name="loginCreateOptions">Optional - the options to use when creating the login</param>
+ /// <param name="isDisabled">Whether to create the login and then immediately disable it</param>
+ /// <returns>The Login object</returns>
+ public static Login CreateLogin(this SMO.Server server, string name, LoginType loginType, string password = null, LoginCreateOptions? loginCreateOptions = null, bool isDisabled = false)
+ {
+ var login = CreateLoginDefinition(server, name, loginType);
+ TraceHelper.TraceInformation("Creating login on server '{0}' of type '{1}' with name '{2}'{3}{4}",
+ server.NetNameWithInstance(),
+ loginType,
+ name,
+ loginCreateOptions.HasValue ? " and LoginCreateOptions " + loginCreateOptions.Value : string.Empty,
+ isDisabled ? " and disabled" : string.Empty);
+ if (string.IsNullOrEmpty(password))
+ {
+ login.Create();
+ }
+ else
+ {
+ if (loginCreateOptions.HasValue)
+ {
+ login.Create(password, loginCreateOptions.Value);
+ }
+ else
+ {
+ login.Create(password);
+ }
+ }
+
+ if (isDisabled)
+ {
+ login.Disable();
+ }
+
+ return login;
+ }
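+
+ // Illustrative usage sketch: creating a disabled SQL login with a random password and
+ // dropping it afterwards, assuming "server" is a connected SMO.Server:
+ //
+ //   Login login = server.CreateLogin(
+ //       SmoObjectHelpers.GenerateUniqueObjectName("login"),
+ //       LoginType.SqlLogin,
+ //       SqlTestRandom.GeneratePassword(),
+ //       isDisabled: true);
+ //   SmoObjectHelpers.SafeDrop(login);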
+
+ ///
+ /// Creates a new Login definition for this server. This does not actually
+ /// create the Login - just the local definition.
+ ///
+ /// The server to create the login for
+ /// The name to give the login
+ /// The type of login
+ /// The Login object
+ public static Login CreateLoginDefinition(this SMO.Server server, string name, LoginType loginType)
+ {
+ TraceHelper.TraceInformation("Creating login definition of type '{0}' with name '{1}'", loginType, name);
+ var login = new Login(server, name)
+ {
+ LoginType = loginType,
+ };
+ return login;
+ }
+ }
+}
diff --git a/src/FunctionalTest/Framework/Helpers/SmoObjectHelpers.cs b/src/FunctionalTest/Framework/Helpers/SmoObjectHelpers.cs
new file mode 100644
index 00000000..28c39398
--- /dev/null
+++ b/src/FunctionalTest/Framework/Helpers/SmoObjectHelpers.cs
@@ -0,0 +1,195 @@
+// Copyright (c) Microsoft.
+// Licensed under the MIT license.
+
+
+using Microsoft.SqlServer.Management.Common;
+using Microsoft.SqlServer.Management.Smo;
+using System;
+using System.Diagnostics;
+using TraceHelper = Microsoft.SqlServer.Test.Manageability.Utils.Helpers.TraceHelper;
+
+namespace Microsoft.SqlServer.Test.Manageability.Utils
+{
+ ///
+ /// Helper methods and values for general SMO objects
+ ///
+ public static class SmoObjectHelpers
+ {
+ /// <summary>
+ /// Generates a unique object name, with an optional prefix. The name also optionally
+ /// includes the following characters to test that object names are escaped correctly:
+ /// Single-Quote '
+ /// Double Single-Quote ''
+ /// Closing Bracket ]
+ /// Double Closing Bracket ]]
+ /// {enclosed guid}
+ /// </summary>
+ /// <param name="dbNamePrefix">The prefix to give the generated name</param>
+ /// <param name="includeClosingBracket">Whether to include a closing bracket ] in the name</param>
+ /// <param name="includeDoubleClosingBracket">Whether to include a double closing bracket ]] in the name</param>
+ /// <param name="includeSingleQuote">Whether to include a single quote ' in the name</param>
+ /// <param name="includeDoubleSingleQuote">Whether to include a double single quote '' in the name</param>
+ /// <returns>The generated name</returns>
+ public static string GenerateUniqueObjectName(string dbNamePrefix = "",
+ bool includeClosingBracket = true,
+ bool includeDoubleClosingBracket = true,
+ bool includeSingleQuote = true,
+ bool includeDoubleSingleQuote = true)
+ {
+ return string.Format("{0}{1}{2}{3}{4}{5}",
+ dbNamePrefix ?? string.Empty,
+ includeDoubleSingleQuote ? "''" : string.Empty,
+ includeDoubleClosingBracket ? "]]" : string.Empty,
+ includeClosingBracket ? "]" : string.Empty,
+ includeSingleQuote ? "'" : string.Empty,
+ "{" + Guid.NewGuid() + "}");
+ }
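+
+ // Illustrative sketch: the generated names intentionally contain characters that need
+ // escaping, so they should always be quoted before being embedded in T-SQL:
+ //
+ //   string name = SmoObjectHelpers.GenerateUniqueObjectName("tbl");   // e.g. tbl'']]]'{guid}
+ //   string tsql = "SELECT * FROM " + name.SqlBracketQuoteString();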
+
+ ///
+ /// Escapes a character in a string using the normal SQL method of replacing all
+ /// instances of that character with that character repeated two times (so ' becomes '')
+ ///
+ ///
+ ///
+ ///
+ public static string SqlEscapeString(string str, char escapeChar)
+ {
+ string escapeString = escapeChar.ToString();
+ return str.Replace(escapeString, escapeString + escapeString);
+ }
+
+ ///
+ /// Escapes a string by replacing all instances of ] with ]]
+ ///
+ ///
+ ///
+ public static string SqlEscapeClosingBracket(this string str)
+ {
+ return SqlEscapeString(str, ']');
+ }
+
+ ///
+ /// Escapes a string by replacing all instances of ' with ''
+ ///
+ ///
+ ///
+ public static string SqlEscapeSingleQuote(this string str)
+ {
+ return SqlEscapeString(str, '\'');
+ }
+
+ ///
+ /// Quotes a string in square brackets [], escaping the closing brackets
+ /// in the string as necessary
+ ///
+ ///
+ ///
+ public static string SqlBracketQuoteString(this string str)
+ {
+ return "[" + SqlEscapeClosingBracket(str) + "]";
+ }
+
+ ///
+ /// Quotes a string in single quotes '', escaping the single quotes
+ /// in the string as necessary
+ ///
+ ///
+ ///
+ public static string SqlSingleQuoteString(this string str)
+ {
+ return "'" + SqlEscapeSingleQuote(str) + "'";
+ }
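+
+ // Illustrative sketch of the two quoting helpers, given a name containing both
+ // a single quote and a closing bracket:
+ //
+ //   "my]db's".SqlBracketQuoteString()  // -> [my]]db's]
+ //   "my]db's".SqlSingleQuoteString()   // -> 'my]db''s'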
+
+ ///
+ /// Creates an index on the optionally specified columns (defaulting to the first one if none are specified).
+ ///
+ ///
+ ///
+ ///
+ ///
+ public static Microsoft.SqlServer.Management.Smo.Index CreateIndex(this TableViewTableTypeBase tableView,
+ string namePrefix,
+ IndexProperties indexProperties = null)
+ {
+ if (indexProperties == null)
+ {
+ //If caller doesn't specify index properties use defaults
+ indexProperties = new IndexProperties();
+ }
+
+ var index = new Microsoft.SqlServer.Management.Smo.Index(tableView, SmoObjectHelpers.GenerateUniqueObjectName(namePrefix));
+ if (indexProperties.Columns == null)
+ {
+ //Default to using first column if none were specified
+ indexProperties.Columns = new[] { tableView.Columns[0] };
+ }
+ foreach (Column column in indexProperties.Columns)
+ {
+ index.IndexedColumns.Add(new IndexedColumn(index, column.Name));
+
+ }
+ index.IndexType = indexProperties.IndexType;
+ index.IndexKeyType = indexProperties.KeyType;
+ index.IsClustered = indexProperties.IsClustered;
+ index.IsUnique = indexProperties.IsUnique;
+ index.OnlineIndexOperation = indexProperties.OnlineIndexOperation;
+
+ // Only set the resumable property if specified as true, since this can be run
+ // against server versions that don't support the resumable option.
+ if (indexProperties.Resumable)
+ {
+ index.ResumableIndexOperation = indexProperties.Resumable;
+ }
+
+ TraceHelper.TraceInformation("Creating new index \"{0}\" with IndexType {1} KeyType {2}, IsClustered {3}, IsUnique {4}, IsOnline {5}, IsResumable {6} and {7} columns",
+ index.Name,
+ indexProperties.IndexType,
+ indexProperties.KeyType,
+ indexProperties.IsClustered,
+ indexProperties.IsUnique,
+ indexProperties.OnlineIndexOperation,
+ indexProperties.Resumable,
+ indexProperties.Columns.Length);
+ index.Create();
+ return index;
+ }
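+
+ // Illustrative usage sketch: creating a unique clustered index on the first column of a table,
+ // assuming "table" is an existing SMO Table and IndexProperties exposes the members set above:
+ //
+ //   var idx = table.CreateIndex("ix", new IndexProperties { IsClustered = true, IsUnique = true });
+ //   // idx has been created on the server with a uniquely generated, "ix"-prefixed name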
+
+ ///
+ /// Safely calls Drop() on a set of objects. This will
+ /// catch any exceptions thrown when calling Drop(), log them and then move
+ /// on to the next object.
+ ///
+ ///
+ public static void SafeDrop(params IDroppable[] objs)
+ {
+ foreach (IDroppable obj in objs)
+ {
+ if (obj == null)
+ {
+ continue;
+ }
+
+ string name = "";
+ try
+ {
+ name = obj.ToString();
+ NamedSmoObject namedObj = obj as NamedSmoObject;
+ if (namedObj != null)
+ {
+ name = namedObj.Name;
+ }
+
+ TraceHelper.TraceInformation("Safely dropping object {0}", name);
+ obj.Drop();
+ }
+ catch (Exception e)
+ {
+ //Don't want to throw even if an error occurs, just log it for
+ //debugging purposes
+ Trace.TraceWarning("Exception trying to drop object {0} - {1}. Ignoring as this object is being safely dropped.", name, e.Message);
+ }
+ }
+
+ }
+ }
+}
diff --git a/src/FunctionalTest/Framework/Helpers/SqlTestRandom.cs b/src/FunctionalTest/Framework/Helpers/SqlTestRandom.cs
new file mode 100644
index 00000000..7892a105
--- /dev/null
+++ b/src/FunctionalTest/Framework/Helpers/SqlTestRandom.cs
@@ -0,0 +1,72 @@
+// Copyright (c) Microsoft.
+// Licensed under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.Security.Cryptography;
+using System.Text;
+
+namespace Microsoft.SqlServer.Test.Manageability.Utils
+{
+ public static class SqlTestRandom
+ {
+ private static Random _random = new Random();
+
+ ///
+ /// Generates an array of the specified length of random bytes
+ ///
+ ///
+ ///
+ public static byte[] GenerateRandomBytes(int length)
+ {
+ byte[] bytes = new byte[length];
+ _random.NextBytes(bytes);
+ return bytes;
+ }
+
+ ///
+ /// Use crypto to generate a random set of bytes, then
+ /// convert those to a string.
+ ///
+ /// This method was taken from DACFx's StringUtils class
+ public static string GeneratePassword()
+ {
+ // Create a byte array to hold the random value.
+ byte[] randomNumber = new byte[48];
+ Random randomChar = new Random(Environment.TickCount);
+
+ RandomNumberGenerator provider = RandomNumberGenerator.Create();
+ provider.GetBytes(randomNumber);
+
+ byte[] complexity = { 0x6d, 0x73, 0x46, 0x54, 0x37, 0x5f, 0x26, 0x23, 0x24, 0x21, 0x7e, 0x3c };
+ Array.Copy(complexity, 0, randomNumber, randomNumber.GetLength(0) / 2, complexity.GetLength(0));
+
+ StringBuilder sb = new StringBuilder();
+ List<char> badChars = new List<char>();
+ badChars.Add('\''); // single quote is a bad character
+ badChars.Add('-');
+ badChars.Add('*');
+ badChars.Add('/');
+ badChars.Add('\\');
+ badChars.Add('\"');
+ badChars.Add('[');
+ badChars.Add(']');
+ badChars.Add(')');
+ badChars.Add('(');
+ for (int i = 0; i < randomNumber.GetLength(0); i++)
+ {
+ if (randomNumber[i] == 0) ++randomNumber[i];
+ char ch = Convert.ToChar(randomNumber[i]);
+ if ((int)ch < 32 ||
+ (int)ch > 126 ||
+ badChars.Contains(ch))
+ {
+ ch = (char)((int)'a' + randomChar.Next(0, 125 - (int)'a')); //replacing bad character with 'a' + random
+ }
+ sb.Append(ch);
+ }
+
+ return sb.ToString();
+ }
+ }
+}
diff --git a/src/FunctionalTest/Framework/Helpers/SqlTypeConverterHelpers.cs b/src/FunctionalTest/Framework/Helpers/SqlTypeConverterHelpers.cs
new file mode 100644
index 00000000..d92481d2
--- /dev/null
+++ b/src/FunctionalTest/Framework/Helpers/SqlTypeConverterHelpers.cs
@@ -0,0 +1,30 @@
+// Copyright (c) Microsoft.
+// Licensed under the MIT license.
+
+using System;
+using Microsoft.SqlServer.Management.Smo;
+
+namespace Microsoft.SqlServer.Test.Manageability.Utils.Helpers
+{
+ ///
+ /// Helper methods dealing with conversion of types
+ ///
+ public static class SqlTypeConverterHelpers
+ {
+ public static object ConvertToType(Type t, object obj)
+ {
+ if (t == typeof (DataType))
+ {
+ return new DataType(DataType.SqlToEnum(obj.ToString()));
+ }
+ else if (t.IsEnum)
+ {
+ return obj is string ? Enum.Parse(t, (string)obj, true) : Enum.ToObject(t, obj);
+ }
+ else
+ {
+ return Convert.ChangeType(obj, t);
+ }
+ }
+ }
+}
diff --git a/src/FunctionalTest/Framework/Helpers/StringCollectionHelpers.cs b/src/FunctionalTest/Framework/Helpers/StringCollectionHelpers.cs
new file mode 100644
index 00000000..e22c760b
--- /dev/null
+++ b/src/FunctionalTest/Framework/Helpers/StringCollectionHelpers.cs
@@ -0,0 +1,31 @@
+// Copyright (c) Microsoft.
+// Licensed under the MIT license.
+
+
+using System.Collections.Specialized;
+using System.Linq;
+using System.Text;
+
+namespace Microsoft.SqlServer.Test.Manageability.Utils
+{
+ /// <summary>
+ /// Helpful extension methods on <see cref="StringCollection"/>.
+ /// </summary>
+ public static class StringCollectionHelpers
+ {
+ ///
+ /// Merges the contents of a StringCollection into a single string, with each string being on a new line.
+ ///
+ /// The string collection to convert.
+ /// A single string with the contents of the StringCollection
+ public static string ToSingleString(this StringCollection sc)
+ {
+ if (sc == null)
+ {
+ return string.Empty;
+ }
+
+ return sc.Cast<string>().Aggregate(new StringBuilder(), (sb, s) => sb.AppendLine(s)).ToString();
+ }
+ }
+}
diff --git a/src/FunctionalTest/Framework/Helpers/StringExtensions.cs b/src/FunctionalTest/Framework/Helpers/StringExtensions.cs
new file mode 100644
index 00000000..095637c2
--- /dev/null
+++ b/src/FunctionalTest/Framework/Helpers/StringExtensions.cs
@@ -0,0 +1,192 @@
+// Copyright (c) Microsoft.
+// Licensed under the MIT license.
+
+using System;
+using System.ComponentModel;
+using System.Runtime.InteropServices;
+using System.Security;
+
+namespace Microsoft.SqlServer.Test.Manageability.Utils
+{
+ /// <summary>
+ /// Helpful extension methods on <see cref="string"/>.
+ /// </summary>
+ public static class StringExtensions
+ {
+ ///
+ /// Calls string.Format with the specified args
+ ///
+ ///
+ ///
+ ///
+ public static string FormatStr(this string str, params object[] args)
+ {
+ return string.Format(str, args);
+ }
+ /// <summary>
+ /// Replaces all occurrences of each key in the given dictionary with the corresponding value.
+ /// </summary>
+ /// <param name="target">The string to replace in.</param>
+ /// <param name="values">The dictionary (or in general: enumeration of key-value pairs) of key-value pairs to replace.</param>
+ /// <returns>The string with the keys replaced with values.</returns>
+ public static string Replace(this string target, System.Collections.Generic.IEnumerable<System.Collections.Generic.KeyValuePair<string, string>> values)
+ {
+ foreach (System.Collections.Generic.KeyValuePair<string, string> value in values)
+ {
+ target = target.Replace(value.Key, value.Value);
+ }
+ return target;
+ }
+
+ /// <summary>
+ /// String replace that takes a StringComparison to specify the type of comparison to use
+ /// </summary>
+ /// <param name="str">The string to replace in</param>
+ /// <param name="oldValue">The old string value to replace</param>
+ /// <param name="newValue">The new string value to replace with</param>
+ /// <param name="comparison">The type of string comparison to use</param>
+ /// <returns>The string with all occurrences of oldValue replaced with newValue</returns>
+ public static string Replace(this string str, string oldValue, string newValue, System.StringComparison comparison)
+ {
+ if (oldValue == null)
+ {
+ throw new System.ArgumentNullException("oldValue");
+ }
+ if (oldValue == string.Empty)
+ {
+ throw new System.ArgumentException("String must be non-empty", "oldValue");
+ }
+ if (newValue == null)
+ {
+ throw new System.ArgumentNullException("newValue");
+ }
+ System.Text.StringBuilder sb = new System.Text.StringBuilder();
+ int previousIndex = 0;
+ for (int index = str.IndexOf(oldValue, comparison); index != -1; index = str.IndexOf(oldValue, index, comparison))
+ {
+ sb.Append(str.Substring(previousIndex, index - previousIndex));
+ sb.Append(newValue);
+ index += oldValue.Length;
+ previousIndex = index;
+ }
+ sb.Append(str.Substring(previousIndex));
+ return sb.ToString();
+ }
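+
+ // Illustrative sketch: a case-insensitive replace that plain string.Replace cannot do
+ // on older target frameworks:
+ //
+ //   "SELECT * FROM Foo".Replace("foo", "[dbo].[Foo]", StringComparison.OrdinalIgnoreCase)
+ //   // -> "SELECT * FROM [dbo].[Foo]"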
+
+ /// <summary>
+ /// Converts the specified string to type <typeparamref name="T"/>.
+ /// Extension method on <see cref="string"/>.
+ /// </summary>
+ /// <exception cref="System.ArgumentNullException"><paramref name="value"/> is null.</exception>
+ /// <typeparam name="T">The type to convert the string to.</typeparam>
+ /// <param name="value">The string value to convert.</param>
+ /// <returns>Result of the conversion.</returns>
+ public static T ConvertTo<T>(this string value)
+ {
+ if (string.IsNullOrEmpty(value))
+ {
+ throw new System.ArgumentNullException("value");
+ }
+ TypeConverter converter = TypeDescriptor.GetConverter(typeof(T));
+ if (converter == null)
+ {
+ throw new System.InvalidOperationException("Unable to find a converter for " + typeof(T).FullName);
+ }
+ return (T)((object)converter.ConvertFromString(value));
+ }
+
+ /// <summary>
+ /// Converts the specified string to type <typeparamref name="T"/> or returns <paramref name="defaultValue"/>
+ /// if it cannot convert the string.
+ /// Extension method on <see cref="string"/>.
+ /// </summary>
+ /// <typeparam name="T">The type to convert the string to.</typeparam>
+ /// <param name="value">The string value to convert.</param>
+ /// <param name="defaultValue">The default value to return if it is unable to convert the string.</param>
+ /// <returns>
+ /// Result of the conversion or the default value.
+ /// </returns>
+ public static T ConvertToOrDefault<T>(this string value, T defaultValue)
+ {
+ if (string.IsNullOrEmpty(value))
+ {
+ return defaultValue;
+ }
+ TypeConverter converter = TypeDescriptor.GetConverter(typeof(T));
+ if (converter == null)
+ {
+ throw new System.InvalidOperationException("Unable to find a converter for " + typeof(T).FullName);
+ }
+ if (!converter.IsValid(value))
+ {
+ return defaultValue;
+ }
+ return (T)((object)converter.ConvertFromString(value));
+ }
+
+ ///
+ /// Converts secure string to string
+ ///
+ /// Secure string
+ /// secure string converted to string
+ public static string SecureStringToString(this SecureString secureString)
+ {
+ if (secureString == null)
+ {
+ return null;
+ }
+
+ char[] charArray = new char[secureString.Length];
+ IntPtr ptr = Marshal.SecureStringToGlobalAllocUnicode(secureString);
+
+ try
+ {
+ Marshal.Copy(ptr, charArray, 0, secureString.Length);
+ }
+ finally
+ {
+ Marshal.ZeroFreeGlobalAllocUnicode(ptr);
+ }
+
+ return new string(charArray);
+ }
+
+ ///
+ /// Converts char array to secure string
+ ///
+ ///
+ /// Array of characters to secure string
+ public static SecureString StringToSecureString(this string str)
+ {
+ if (str == null)
+ {
+ return null;
+ }
+
+ var secureString = new SecureString();
+ foreach (char c in str)
+ {
+ secureString.AppendChar(c);
+ }
+
+ secureString.MakeReadOnly();
+
+ return secureString;
+ }
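+
+ // Illustrative sketch: round-tripping a password through SecureString, e.g. when a
+ // connection API requires one:
+ //
+ //   SecureString secure = "p@ssw0rd!".StringToSecureString();
+ //   string plain = secure.SecureStringToString();   // "p@ssw0rd!"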
+
+ ///
+ /// Converts \r\n in input to Environment.NewLine
+ ///
+ ///
+ ///
+ public static string FixNewLines(this string input)
+ {
+ if (Environment.NewLine != "\r\n")
+ {
+ return input.Replace("\r\n", Environment.NewLine);
+ }
+
+ return input;
+ }
+ }
+}
diff --git a/src/FunctionalTest/Framework/Helpers/TSqlScriptingHelper.cs b/src/FunctionalTest/Framework/Helpers/TSqlScriptingHelper.cs
new file mode 100644
index 00000000..89a4faeb
--- /dev/null
+++ b/src/FunctionalTest/Framework/Helpers/TSqlScriptingHelper.cs
@@ -0,0 +1,38 @@
+// Copyright (c) Microsoft.
+// Licensed under the MIT license.
+
+using System;
+using Microsoft.SqlServer.Management.Common;
+using Microsoft.VisualStudio.TestTools.UnitTesting;
+using SMO = Microsoft.SqlServer.Management.Smo;
+
+namespace Microsoft.SqlServer.Test.Manageability.Utils
+{
+ ///
+ /// Provides helper methods for TSql Scripting
+ ///
+ public static class TSqlScriptingHelper
+ {
+ ///
+ /// Records and returns the T-SQL script for the given action.
+ ///
+ /// Server to set the CaptureSql execution mode on.
+ /// The action whose script should be recorded.
+ /// The recorded script
+ public static string GenerateScriptForAction(SMO.Server server, Action action)
+ {
+ string result = null;
+
+ server.ExecuteWithModes(SqlExecutionModes.CaptureSql, () =>
+ {
+ action();
+ result = server.ExecutionManager.ConnectionContext.CapturedSql.Text.ToSingleString();
+ server.ExecutionManager.ConnectionContext.CapturedSql.Clear();
+ });
+
+ Assert.IsNotNull(result, "Could not record script");
+
+ return result;
+ }
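+
+ // Illustrative usage sketch: capturing the T-SQL that SMO would emit for an Alter without
+ // actually executing it, assuming "server" and "db" are existing SMO objects:
+ //
+ //   string script = TSqlScriptingHelper.GenerateScriptForAction(server, () => db.Alter());
+ //   Assert.IsTrue(script.Contains("ALTER DATABASE"));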
+ }
+}
diff --git a/src/FunctionalTest/Framework/Helpers/TableObjectHelpers.cs b/src/FunctionalTest/Framework/Helpers/TableObjectHelpers.cs
new file mode 100644
index 00000000..28426176
--- /dev/null
+++ b/src/FunctionalTest/Framework/Helpers/TableObjectHelpers.cs
@@ -0,0 +1,87 @@
+// Copyright (c) Microsoft.
+// Licensed under the MIT license.
+
+using Microsoft.SqlServer.Management.Smo;
+using Microsoft.SqlServer.Test.Manageability.Utils.Helpers;
+
+namespace Microsoft.SqlServer.Test.Manageability.Utils
+{
+ ///
+ /// Helper methods, constants, etc dealing with the SMO Table object
+ ///
+ public static class TableObjectHelpers
+ {
+ ///
+ /// Creates a FOR INSERT trigger definition with a uniquely generated name prefixed by the specified prefix and defined with the specified
+ /// body and header. Optionally allows specifying the schema and whether the view is Schema Bound.
+ ///
+ ///
+ ///
+ ///
+ ///
+ public static Trigger CreateTriggerDefinition(this Table table, string triggerNamePrefix, string textBody)
+ {
+ var trigger = new Trigger(table, SmoObjectHelpers.GenerateUniqueObjectName(triggerNamePrefix));
+ trigger.TextBody = textBody;
+ trigger.TextHeader = string.Format("CREATE TRIGGER {0} ON {1}.{2} FOR INSERT AS",
+ SmoObjectHelpers.SqlBracketQuoteString(trigger.Name),
+ SmoObjectHelpers.SqlBracketQuoteString(table.Schema),
+ SmoObjectHelpers.SqlBracketQuoteString(table.Name));
+ TraceHelper.TraceInformation("Creating new trigger definition \"{0}\" on table \"{1}.{2}\"", trigger.Name, table.Schema, table.Name);
+ return trigger;
+ }
+
+ ///
+ /// Creates a FOR INSERT trigger with a uniquely generated name prefixed by the specified prefix and defined with the specified
+ /// body and header. Optionally allows specifying the schema and whether the view is Schema Bound.
+ ///
+ ///
+ ///
+ ///
+ ///
+ public static Trigger CreateTrigger(this Table table, string triggerNamePrefix, string textBody)
+ {
+ var trigger = table.CreateTriggerDefinition(triggerNamePrefix, textBody);
+ trigger.Create();
+ return trigger;
+ }
+
+ /// <summary>
+ /// Inserts generated data into the existing table.
+ /// </summary>
+ /// <param name="table">The SMO Table object.</param>
+ /// <param name="rowCount">The number of rows to insert.</param>
+ /// <returns>The table, refreshed after the insert.</returns>
+ public static Table InsertDataToTable(this Table table, int rowCount)
+ {
+ // Get the "@i, @i, ..., @i" string
+ string col_query = "";
+ int cnt = table.Columns.Count;
+ while (cnt != 0)
+ {
+ col_query += "@i";
+ cnt--;
+ if (cnt != 0)
+ {
+ col_query += ",";
+ }
+ }
+
+ // Generate the tsql of inserting data into table.
+ string query = string.Format(@"BEGIN TRAN
+ DECLARE @i INT
+ SET @i = 0
+ WHILE @i < {0}
+ BEGIN
+ INSERT INTO {1} VALUES ({2})
+ SET @i = @i + 1
+ END
+ COMMIT TRAN",
+ rowCount, table.ToString(), col_query);
+
+ TraceHelper.TraceInformation("Inserting {0} rows data into the created table {1}", rowCount.ToString(),
+ table.ToString());
+ table.Parent.ExecuteNonQuery(query);
+ table.Refresh();
+ return table;
+ }
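+
+ // Illustrative usage sketch: populating a freshly created table with 100 rows of counter
+ // values, assuming every column of "table" accepts an int:
+ //
+ //   table.InsertDataToTable(100);   // table now contains 100 rows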
+ }
+}
\ No newline at end of file
diff --git a/src/FunctionalTest/Framework/Helpers/TestContextExtensions.cs b/src/FunctionalTest/Framework/Helpers/TestContextExtensions.cs
new file mode 100644
index 00000000..c85d65b8
--- /dev/null
+++ b/src/FunctionalTest/Framework/Helpers/TestContextExtensions.cs
@@ -0,0 +1,30 @@
+// Copyright (c) Microsoft.
+// Licensed under the MIT license.
+
+using System;
+using System.Linq;
+using Microsoft.VisualStudio.TestTools.UnitTesting;
+
+namespace Microsoft.SqlServer.Test.Manageability.Utils.Helpers
+{
+ ///
+ /// Extension methods for TestContext
+ ///
+ public static class TestContextExtensions
+ {
+ ///
+ /// Tests the given name against the set of names in the SqlTestTargetServersFilter parameter.
+ /// The parameter is a semi-colon delimited list of allowed server names for the test run.
+ /// This filter is intersected with the environment variable of the same name.
+ ///
+ ///
+ ///
+ /// true if the filter is empty or if the given name is included in the list
+ public static bool SqlTestTargetServersFilter(this TestContext testContext, string serverName)
+ {
+ var strings = testContext.Properties["SqlTestTargetServersFilter"]?.ToString()
+ .Split(new[] {';'}, StringSplitOptions.RemoveEmptyEntries);
+ return strings?.Contains(serverName, StringComparer.OrdinalIgnoreCase) ?? true;
+ }
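+
+ // Illustrative usage sketch: skipping a test when the current target server is filtered out,
+ // assuming this runs inside a SqlTestBase-derived test and the runsettings define a
+ // SqlTestTargetServersFilter property:
+ //
+ //   if (!TestContext.SqlTestTargetServersFilter(ServerName))
+ //   {
+ //       Assert.Inconclusive("Server {0} is excluded by SqlTestTargetServersFilter", ServerName);
+ //   }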
+ }
+}
diff --git a/src/FunctionalTest/Framework/Helpers/TraceHelper.cs b/src/FunctionalTest/Framework/Helpers/TraceHelper.cs
new file mode 100644
index 00000000..22b7efcd
--- /dev/null
+++ b/src/FunctionalTest/Framework/Helpers/TraceHelper.cs
@@ -0,0 +1,50 @@
+// Copyright (c) Microsoft.
+// Licensed under the MIT license.
+
+using System;
+
+namespace Microsoft.SqlServer.Test.Manageability.Utils.Helpers
+{
+ using System.Diagnostics;
+
+ ///
+ /// Helper for configuring Trace
+ ///
+ public class TraceHelper
+ {
+ ///
+ /// Enable autoflush so that flush is called on trace after every write
+ ///
+ public static void EnableAutoFlush()
+ {
+ Trace.AutoFlush = true;
+ }
+
+ ///
+ /// Disable autoflush so that flush is not called on trace after every write
+ ///
+ public static void DisableAutoFlush()
+ {
+ Trace.AutoFlush = false;
+ }
+
+ ///
+ /// The default trace output lacks timestamps, this adds it
+ ///
+ ///
+ public static void TraceInformation(string message)
+ {
+ Trace.TraceInformation($"{DateTime.Now.ToString("o")} - {message}");
+ }
+
+ ///
+ /// The default trace output lacks timestamps, this adds it
+ ///
+ ///
+ ///
+ public static void TraceInformation(string format, params object[] args)
+ {
+ Trace.TraceInformation($"{DateTime.Now.ToString("o")} - {format}", args);
+ }
+ }
+}
diff --git a/src/FunctionalTest/Framework/Helpers/XmlHelper.cs b/src/FunctionalTest/Framework/Helpers/XmlHelper.cs
new file mode 100644
index 00000000..49273fb0
--- /dev/null
+++ b/src/FunctionalTest/Framework/Helpers/XmlHelper.cs
@@ -0,0 +1,62 @@
+// Copyright (c) Microsoft.
+// Licensed under the MIT license.
+
+using System.Xml;
+using System.Xml.XPath;
+using NUnit.Framework;
+
+namespace Microsoft.SqlServer.Test.Manageability.Utils.Helpers
+{
+ ///
+ /// Methods that assert on structure of the input XML based on an xpath
+ ///
+ public class XmlHelper
+ {
+ ///
+ /// Return XML nodes matching the given xpath
+ ///
+ ///
+ ///
+ ///
+ ///
+ public static XPathNodeIterator SelectNodes(string xPath, IXPathNavigable xmlNode, XmlNamespaceManager xmlnsManager)
+ {
+ TraceHelper.TraceInformation("Evaluating " + xPath);
+
+ XPathNavigator nav = xmlNode.CreateNavigator();
+ XPathExpression expr = nav.Compile(xPath);
+ expr.SetContext(xmlnsManager);
+ XPathNodeIterator xpni = nav.Select(expr);
+
+ return xpni;
+ }
+
+ ///
+ /// Asserts that the given path returns a single node
+ ///
+ ///
+ ///
+ ///
+ ///
+ public static XPathNavigator SelectFirstAndOnlyNode(string xPath, IXPathNavigable xmlNode, XmlNamespaceManager xmlnsManager)
+ {
+ XPathNodeIterator xPathNodeIterator = SelectNodes(xPath, xmlNode, xmlnsManager);
+ Assert.That(xPathNodeIterator.Count, Is.EqualTo(1), "There should be one result from xpath query: " + xPath);
+ xPathNodeIterator.MoveNext();
+ TraceHelper.TraceInformation("Value: " + xPathNodeIterator.Current.Value);
+ return xPathNodeIterator.Current;
+ }
+
+ ///
+ /// Asserts that the given path returns zero nodes
+ ///
+ ///
+ ///
+ ///
+ public static void SelectZeroNodes(string xPath, IXPathNavigable xmlNode, XmlNamespaceManager xmlnsManager)
+ {
+ XPathNodeIterator xPathNodeIterator = SelectNodes(xPath, xmlNode, xmlnsManager);
+ Assert.That(xPathNodeIterator.Count, Is.EqualTo(0), "There should be zero results from xpath query: " + xPath);
+ }
+ }
+}
diff --git a/src/FunctionalTest/Framework/Microsoft.SqlServer.Test.Manageability.Utils.csproj b/src/FunctionalTest/Framework/Microsoft.SqlServer.Test.Manageability.Utils.csproj
new file mode 100644
index 00000000..b33659ad
--- /dev/null
+++ b/src/FunctionalTest/Framework/Microsoft.SqlServer.Test.Manageability.Utils.csproj
@@ -0,0 +1,32 @@
+
+
+ $(MSBuildAllProjects);$(MSBuildThisFileFullPath)
+ $(TargetFrameworks)
+
+ true
+ {3AC096D0-A1C2-E12C-1390-A8335801FDAB};{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}
+ $(NoWarn);NU1603
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ PreserveNewest
+
+
+
+ $(BaseOutputPath)\Documentation\$(TargetFramework)\$(AssemblyName).xml
+ $(NoWarn);1591
+
+
diff --git a/src/FunctionalTest/Framework/PRVerification.runsettings b/src/FunctionalTest/Framework/PRVerification.runsettings
new file mode 100644
index 00000000..2dd1a040
--- /dev/null
+++ b/src/FunctionalTest/Framework/PRVerification.runsettings
@@ -0,0 +1,10 @@
+
+
+
+
+
+
+ 1
+ true
+
+
diff --git a/src/FunctionalTest/Framework/Scripts/ValidateTableDataRetention_DataRetentionPeriodOption.sql b/src/FunctionalTest/Framework/Scripts/ValidateTableDataRetention_DataRetentionPeriodOption.sql
new file mode 100644
index 00000000..82892f05
--- /dev/null
+++ b/src/FunctionalTest/Framework/Scripts/ValidateTableDataRetention_DataRetentionPeriodOption.sql
@@ -0,0 +1,9 @@
+create table tableName_Month ([dbdatetime2] datetime2(7)) with (DATA_DELETION = ON ( FILTER_COLUMN = [dbdatetime2], RETENTION_PERIOD = 1 Month ) )
+
+create table tableName_Week ([dbdatetime2] datetime2(7)) with (DATA_DELETION = ON ( FILTER_COLUMN = [dbdatetime2], RETENTION_PERIOD = 1 Week ) )
+
+create table tableName_Day ([dbdatetime2] datetime2(7)) with (DATA_DELETION = ON ( FILTER_COLUMN = [dbdatetime2], RETENTION_PERIOD = 1 Day ) )
+
+create table tableName_Year ([dbdatetime2] datetime2(7)) with (DATA_DELETION = ON ( FILTER_COLUMN = [dbdatetime2], RETENTION_PERIOD = 1 Year ) )
+
+create table tableName_Infinite ([dbdatetime2] datetime2(7)) with (DATA_DELETION = ON ( FILTER_COLUMN = [dbdatetime2], RETENTION_PERIOD = INFINITE ) )
\ No newline at end of file
diff --git a/src/FunctionalTest/Framework/Sql2017Linux.runsettings b/src/FunctionalTest/Framework/Sql2017Linux.runsettings
new file mode 100644
index 00000000..0f8e0a3a
--- /dev/null
+++ b/src/FunctionalTest/Framework/Sql2017Linux.runsettings
@@ -0,0 +1,7 @@
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/src/FunctionalTest/Framework/SqlVersion.cs b/src/FunctionalTest/Framework/SqlVersion.cs
new file mode 100644
index 00000000..5ab88018
--- /dev/null
+++ b/src/FunctionalTest/Framework/SqlVersion.cs
@@ -0,0 +1,24 @@
+// Copyright (c) Microsoft.
+// Licensed under the MIT license.
+
+using System;
+
+
+namespace Microsoft.SqlServer.Test.Manageability.Utils
+{
+ [Flags]
+ public enum SqlVersion
+ {
+ AzureSawaV1 = 1,
+ AzureSterlingV12 = 2,
+ Sql2005 = 4,
+ Sql2008 = 8,
+ Sql2008R2 = 16,
+ Sql2012 = 32,
+ Sql2012SP1 = 64,
+ Sql2014 = 128,
+ Sql2016 = 256,
+ Sql2017 = 512,
+ Sqlv150 = 1024
+ }
+}
diff --git a/src/FunctionalTest/Framework/Sqlv150Linux.runsettings b/src/FunctionalTest/Framework/Sqlv150Linux.runsettings
new file mode 100644
index 00000000..f99cd54f
--- /dev/null
+++ b/src/FunctionalTest/Framework/Sqlv150Linux.runsettings
@@ -0,0 +1,7 @@
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/src/FunctionalTest/Framework/Sqlv160Linux.runsettings b/src/FunctionalTest/Framework/Sqlv160Linux.runsettings
new file mode 100644
index 00000000..32fbebae
--- /dev/null
+++ b/src/FunctionalTest/Framework/Sqlv160Linux.runsettings
@@ -0,0 +1,7 @@
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/src/FunctionalTest/Framework/TestFramework/DisconnectedTestAttribute.cs b/src/FunctionalTest/Framework/TestFramework/DisconnectedTestAttribute.cs
new file mode 100644
index 00000000..51aebaae
--- /dev/null
+++ b/src/FunctionalTest/Framework/TestFramework/DisconnectedTestAttribute.cs
@@ -0,0 +1,20 @@
+// Copyright (c) Microsoft.
+// Licensed under the MIT license.
+
+using System;
+
+namespace Microsoft.SqlServer.Test.Manageability.Utils.TestFramework
+{
+ ///
+ /// Marks a test as being a "Disconnected" test, which means it will be run
+ /// without making a server connection first.
+ ///
+ [AttributeUsage(AttributeTargets.Method, AllowMultiple = false)]
+ public class DisconnectedTestAttribute : Attribute
+ {
+ public DisconnectedTestAttribute()
+ {
+
+ }
+ }
+}
diff --git a/src/FunctionalTest/Framework/TestFramework/SqlFeature.cs b/src/FunctionalTest/Framework/TestFramework/SqlFeature.cs
new file mode 100644
index 00000000..808663b8
--- /dev/null
+++ b/src/FunctionalTest/Framework/TestFramework/SqlFeature.cs
@@ -0,0 +1,42 @@
+// Copyright (c) Microsoft.
+// Licensed under the MIT license.
+
+namespace Microsoft.SqlServer.Test.Manageability.Utils.TestFramework
+{
+ ///
+ /// The feature of SQL Server
+ /// Test cases uses this enum to mark the required features in order to run the test and test framework will find the servers with the required features enabled
+ ///
+ public enum SqlFeature
+ {
+ ///
+ /// Always On Availability Groups
+ ///
+ AlwaysOn,
+
+ ///
+ /// Azure SSIS
+ ///
+ AzureSSIS,
+
+ ///
+ /// In-memory aka Hekaton
+ ///
+ Hekaton,
+
+ ///
+ /// SqlClr hosting
+ ///
+ SqlClr,
+
+ ///
+ /// Azure Active Directory Logins SqlDB
+ ///
+ AADLoginsSqlDB,
+
+ ///
+ /// Azure Ledger Database support
+ ///
+ AzureLedger
+ }
+}
diff --git a/src/FunctionalTest/Framework/TestFramework/SqlRequiredFeatureAttribute.cs b/src/FunctionalTest/Framework/TestFramework/SqlRequiredFeatureAttribute.cs
new file mode 100644
index 00000000..5a2de84f
--- /dev/null
+++ b/src/FunctionalTest/Framework/TestFramework/SqlRequiredFeatureAttribute.cs
@@ -0,0 +1,31 @@
+// Copyright (c) Microsoft.
+// Licensed under the MIT license.
+
+using System;
+
+namespace Microsoft.SqlServer.Test.Manageability.Utils.TestFramework
+{
+
+ ///
+ /// The attribute to describe the required features that must be enabled on the target servers in order to run the test
+ ///
+ [AttributeUsage(AttributeTargets.Class | AttributeTargets.Method)]
+ public class SqlRequiredFeatureAttribute : Attribute
+ {
+ private SqlFeature[] _requiredFeatures;
+
+
+ ///
+ /// Constructor
+ ///
+ public SqlRequiredFeatureAttribute(params SqlFeature[] requiredFeatures)
+ {
+ _requiredFeatures = requiredFeatures;
+ }
+
+ public SqlFeature[] RequiredFeatures
+ {
+ get { return _requiredFeatures; }
+ }
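+
+ // Illustrative usage sketch: marking a test that needs Hekaton support so the framework
+ // only schedules it on servers with that feature enabled:
+ //
+ //   [TestMethod]
+ //   [SqlRequiredFeature(SqlFeature.Hekaton)]
+ //   public void MemoryOptimizedTable_CanBeScripted() { ... }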
+ }
+}
diff --git a/src/FunctionalTest/Framework/TestFramework/SqlTestAreaAttribute.cs b/src/FunctionalTest/Framework/TestFramework/SqlTestAreaAttribute.cs
new file mode 100644
index 00000000..39684dc8
--- /dev/null
+++ b/src/FunctionalTest/Framework/TestFramework/SqlTestAreaAttribute.cs
@@ -0,0 +1,56 @@
+// Copyright (c) Microsoft.
+// Licensed under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using Microsoft.VisualStudio.TestTools.UnitTesting;
+
+namespace Microsoft.SqlServer.Test.Manageability.Utils.TestFramework
+{
+ ///
+ /// The different areas of tests. This is the logical grouping of functionality
+ /// provided, usually wrapped into a single feature or component.
+ ///
+ public enum SqlTestArea
+ {
+ AlwaysOn,
+ AutoParameterization,
+ ConnectionDialog,
+ ExtendedEvents,
+ GraphDb,
+ Hekaton,
+ ObjectExplorer,
+ OptionsDialog,
+ PBM,
+ Polybase,
+ QueryStore,
+ RegisteredServers,
+ Showplan,
+ SMO
+ }
+
+ ///
+ /// Helper attribute to mark test methods with different test areas which can be used to selectively
+ /// group and run tests. While a test can have multiple areas assigned to it that should be a rare
+ /// scenario.
+ ///
+ [AttributeUsage(AttributeTargets.Method | AttributeTargets.Class)]
+ public class SqlTestAreaAttribute : TestCategoryBaseAttribute
+ {
+ private IList<SqlTestArea> _areas;
+
+ public SqlTestAreaAttribute(params SqlTestArea[] areas)
+ {
+ _areas = areas;
+ }
+
+ public override IList<string> TestCategories
+ {
+ get
+ {
+ return _areas.Select(c => c.ToString()).ToList();
+ }
+ }
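+
+ // Illustrative usage sketch: categorizing a test so it can be selected with a vstest
+ // filter such as /TestCaseFilter:"TestCategory=SMO":
+ //
+ //   [TestMethod]
+ //   [SqlTestArea(SqlTestArea.SMO)]
+ //   public void Database_Create_Succeeds() { ... }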
+ }
+}
diff --git a/src/FunctionalTest/Framework/TestFramework/SqlTestBase.cs b/src/FunctionalTest/Framework/TestFramework/SqlTestBase.cs
new file mode 100644
index 00000000..10c3b565
--- /dev/null
+++ b/src/FunctionalTest/Framework/TestFramework/SqlTestBase.cs
@@ -0,0 +1,787 @@
+// Copyright (c) Microsoft.
+// Licensed under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+using Microsoft.VisualStudio.TestTools.UnitTesting;
+#if MICROSOFTDATA
+using Microsoft.Data.SqlClient;
+#else
+using System.Data.SqlClient;
+#endif
+using System.IO;
+using System.Linq;
+using System.Reflection;
+using Microsoft.SqlServer.Management.Smo;
+using SMO = Microsoft.SqlServer.Management.Smo;
+using Microsoft.SqlServer.Management.Common;
+using Microsoft.SqlServer.Test.Manageability.Utils.Helpers;
+
+namespace Microsoft.SqlServer.Test.Manageability.Utils.TestFramework
+{
+ ///
+ /// Base class for tests, providing support for executing tests against specified target servers
+ /// using the SupportedSqlVersionsAttribute
+ ///
+ public abstract class SqlTestBase
+ {
+ static SqlTestBase()
+ {
+ AppDomain.CurrentDomain.AssemblyResolve += CurrentDomain_AssemblyResolve;
+ }
+
+ private static Assembly CurrentDomain_AssemblyResolve(object sender, ResolveEventArgs args)
+ {
+ // We need to emulate the binding redirects from ssms.exe.config for some DLLs
+ // Also the batchparserclient is from sql and depends on an older
+ // version of connectioninfo.
+ var redirections = new HashSet<string>(new[] {
+ "Microsoft.SqlServer.BatchParserClient",
+ "Microsoft.SqlServer.ConnectionInfo",
+ "Microsoft.IdentityModel.Clients.ActiveDirectory",
+ "Newtonsoft.Json" }, System.StringComparer.OrdinalIgnoreCase);
+
+ var an = new AssemblyName(args.Name);
+ if (redirections.Contains(an.Name))
+ {
+ var dll = Path.Combine(Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location), an.Name + ".dll");
+ Trace.TraceInformation($"Trying to load dll:{dll}");
+ try
+ {
+ var assembly = Assembly.LoadFrom(dll);
+ Trace.TraceInformation($"Loaded {dll}");
+ return assembly;
+ }
+ catch (Exception e)
+ {
+ Trace.TraceError($"Unable to load {dll}: {e}");
+ }
+ }
+ return null;
+ }
+
+ ///
+ ///Gets or sets the test context which provides
+ ///information about and functionality for the current test run.
+ ///
+ public TestContext TestContext
+ {
+ get { return testContextInstance; }
+ set { testContextInstance = value; }
+ }
+
+ private TestContext testContextInstance;
+
+ ///
+ /// Name of the server this test is running against
+ ///
+ protected string ServerName
+ {
+ get { return this.SqlConnectionStringBuilder.DataSource; }
+ }
+
+ ///
+ /// The user ID to use when connecting to the server for this test (if using SQL Auth)
+ ///
+ protected string UserId
+ {
+ get { return this.SqlConnectionStringBuilder.UserID; }
+ }
+
+ ///
+ /// The password to use when connecting to the server for this test (if using SQL Auth)
+ ///
+ protected string Password
+ {
+ get { return this.SqlConnectionStringBuilder.Password; }
+ }
+
+ private const string SqlConnectionStringBuilder_PropertyName = "SqlConnectionStringBuilder";
+
+ ///
+ /// The containing the connection string to use for this test
+ ///
+ protected SqlConnectionStringBuilder SqlConnectionStringBuilder
+ {
+ get
+ {
+ return (SqlConnectionStringBuilder) this.TestContext.Properties[SqlConnectionStringBuilder_PropertyName];
+ }
+ private set { this.TestContext.Properties[SqlConnectionStringBuilder_PropertyName] = value; }
+ }
+
+ protected SMO.Server ServerContext { get; set; }
+
+ protected MethodInfo TestMethod { get; set; }
+
+ ///
+ /// This corresponds to the "name" attribute in ConnectionInfo.xml,
+ /// used to allow tests to identify servers without relying on the
+ /// name of the actual backing server (in case that was to change).
+ ///
+ protected string TargetServerFriendlyName { get; set; }
+
+ protected AzureDatabaseEdition DefaultEdition = AzureDatabaseEdition.NotApplicable;
+
+ // Work around for NUnit 3.x Asserts accumulating failure messages across multiple vstest TestMethod invocations.
+ // This IDisposable manages the lifetime of the assert messages raised during our test.
+ private IDisposable nUnitDisposable;
+
+ // Specific to Azure databases, represent all edition types
+ public enum AzureDatabaseEdition
+ {
+ NotApplicable = 0, //If not applicable, we don't explicitly specify edition for dbs
+ Basic = 1,
+ Standard = 2,
+ Premium = 3,
+ DataWarehouse = 4,
+ Hyperscale = 5,
+ GeneralPurpose = 6,
+ BusinessCritical = 7
+ }
+
+ ///
+ /// Initialization method for the test; this contains initialization logic that should be run before ALL
+ /// tests. Test-specific initialization logic should be implemented in an overridden MyTestInitialize()
+ /// method
+ ///
+ [TestInitialize()]
+ public void BaseTestInitialize()
+ {
+ //We need to get the Assembly containing the implementation of this type so GetType will resolve it correctly
+ //as FullyQualifiedTestClassName only contains the type name and not the assembly info (and GetType only
+ //looks in mscorlib and the current executing assembly)
+ Type testClass = this.GetType().GetTypeInfo().Assembly.GetType(this.TestContext.FullyQualifiedTestClassName);
+ this.TestMethod = testClass.GetMethod(this.TestContext.TestName);
+ nUnitDisposable = new NUnit.Framework.Internal.TestExecutionContext.IsolatedContext();
+ MyTestInitialize();
+ }
+
+ [TestCleanup]
+ public void TestCleanup()
+ {
+ if (nUnitDisposable != null)
+ {
+ nUnitDisposable.Dispose();
+ }
+ }
+ public virtual void MyTestInitialize()
+ {
+ }
+
+ ///
+ /// Method ran before each invocation of test method (in case of multiple target servers)
+ ///
+ public virtual void PreExecuteTest()
+ {
+ }
+
+ ///
+ /// Method ran after each invocation of test method (in case of multiple target servers)
+ ///
+ public virtual void PostExecuteTest()
+ {
+ }
+
+ ///
+ /// Executes the specified test method once for each server specified in the SupportedSqlVersion attribute on the
+ /// test method. Will call PreExecute() before the test method invocation and PostExecute() afterwards.
+ ///
+ ///
+ public virtual void ExecuteTest(Action testMethod)
+ {
+ ExecuteTestImpl(server => { testMethod.Invoke(); });
+ }
+
+ ///
+ /// Executes the specified test method once for each server specified in the SupportedSqlVersion attribute on the
+ /// test method. Will call PreExecute() before the test method invocation and PostExecute() afterwards.
+ ///
+ /// Test method to execute with the server object for this test as a parameter
+ public virtual void ExecuteTest(Action<SMO.Server> testMethod)
+ {
+ ExecuteTestImpl(testMethod.Invoke);
+ }
+
+ ///
+ /// Implementation of the ExecuteTest method, which will execute the specified test method once for each server
+ /// supported by the test.
+ ///
+ ///
+ private void ExecuteTestImpl(Action<SMO.Server> executeTestMethod)
+ {
+ //We need to get the Assembly containing the implementation of this type so GetType will resolve it correctly
+ //as FullyQualifiedTestClassName only contains the type name and not the assembly info (and GetType only
+ //looks in mscorlib and the current executing assembly)
+
+ if (this.IsDisconnectedTest())
+ {
+ this.ServerContext = null;
+ this.SqlConnectionStringBuilder = null;
+ try
+ {
+ TraceHelper.TraceInformation("Invoking PreExecute for Disconnected test");
+ PreExecuteTest();
+ TraceHelper.TraceInformation("Invoking test method {0} for Disconnected test", this.TestContext.TestName);
+ executeTestMethod.Invoke(null);
+ TraceHelper.TraceInformation("Invoking PostExecute for Disconnected test");
+ PostExecuteTest();
+ }
+ catch (Exception e)
+ {
+ // Add in some more information
+ throw new InternalTestFailureException(
+ string.Format("Test '{0}' failed when executing disconnected test. Message:\n{1}\nStack Trace:\n{2}",
+ this.TestContext.TestName,
+ e.BuildRecursiveExceptionMessage(),
+ e.StackTrace), e);
+ }
+
+ }
+ else
+ {
+ this.ExecuteTestMethodWithFailureRetry(() =>
+ {
+ try
+ {
+ TraceHelper.TraceInformation("Invoking PreExecute for target server {0}", this.ServerContext.Name);
+ PreExecuteTest();
+ TraceHelper.TraceInformation("Invoking test method {0} with target server {1}",
+ this.TestContext.TestName, this.ServerContext.Name);
+ executeTestMethod.Invoke(this.ServerContext);
+ TraceHelper.TraceInformation("Invoking PostExecute for target server {0}", this.ServerContext.Name);
+ PostExecuteTest();
+
+ }
+ catch (Exception e)
+ {
+ // Add in some more information
+ throw new InternalTestFailureException(
+ string.Format("Test '{0}' failed when targeting server {1}. Message:\n{2}\nStack Trace:\n{3}",
+ this.TestContext.TestName,
+ this.ServerContext.Name,
+ e.BuildRecursiveExceptionMessage(),
+ e.StackTrace), e);
+ }
+ });
+
+ }
+ }
+
+ ///
+ /// Executes the specified test method from the pool associated with the test class, creating a new Database in the pool if needed
+ /// We use the class as the pool scope because test runs are multi-threaded, with test class being the partition for threads.
+ ///
+ /// The test method to execute
+ public void ExecuteFromDbPool(
+ Action<Database> testMethod)
+ {
+ ExecuteFromDbPool(TestContext.FullyQualifiedTestClassName, testMethod);
+ }
+
+ ///
+ /// Executes the specified test method from the pool specified, creating a new Database in the pool if needed. Currently only supports
+ /// creating basic DBs - if more options are required then this method can be extended to expose those as needed.
+ ///
+ /// The name of the pool
+ /// The test method to execute
+ public void ExecuteFromDbPool(
+ string poolName,
+ Action<Database> testMethod)
+ {
+ this.ExecuteTestMethodWithFailureRetry(
+ () =>
+ {
+ Database db = TestServerPoolManager.GetDbFromPool(poolName, this.ServerContext);
+ Trace.TraceInformation($"Returning database {db.Name} for pool {poolName}");
+ if (db.UserAccess == DatabaseUserAccess.Single)
+ {
+ Trace.TraceInformation("Prior test set database to single user, setting back to multiple");
+ db.UserAccess = DatabaseUserAccess.Multiple;
+ db.Alter();
+ }
+ db.ExecutionManager.ConnectionContext.Disconnect();
+ db.ExecutionManager.ConnectionContext.SqlExecutionModes = SqlExecutionModes.ExecuteSql;
+ try
+ {
+ TraceHelper.TraceInformation("Invoking PreExecute for target server {0}", this.ServerContext.Name);
+ PreExecuteTest();
+ TraceHelper.TraceInformation("Invoking test method {0} with target server {1} using database from pool {2}",
+ this.TestContext.TestName, this.ServerContext.Name, poolName);
+ testMethod.Invoke(db);
+ TraceHelper.TraceInformation("Invoking PostExecute for target server {0}",
+ this.ServerContext.Name);
+ PostExecuteTest();
+ }
+ catch (Exception e)
+ {
+ // Add in some more information
+ string message = string.Format(
+ "Test '{0}' failed when targeting server {1}. Message:\n{2}\nStack Trace:\n{3}",
+ this.TestContext.TestName,
+ this.ServerContext.Name,
+ e.BuildRecursiveExceptionMessage(),
+ e.StackTrace);
+ Trace.TraceError(message);
+ throw new InternalTestFailureException(message, e);
+ }
+ finally
+ {
+ db.ExecutionManager.ConnectionContext.CapturedSql.Clear();
+ }
+ });
+ }
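A minimal usage sketch for ExecuteFromDbPool, written as an MSTest class deriving from the test base defined above (the base class name is not visible in this hunk, so SqlTestBase and the test/class names below are assumed for illustration):

    [TestClass]
    public class PooledDbExampleTests : SqlTestBase   // base class name assumed
    {
        [TestMethod]
        public void Table_roundtrip_in_pooled_database()
        {
            ExecuteFromDbPool(db =>
            {
                // The pooled database is shared with other tests in this class, so drop what we create.
                var table = new Table(db, "pooled_" + Guid.NewGuid().ToString("N"));
                table.Columns.Add(new Column(table, "id", DataType.Int));
                table.Create();
                try
                {
                    db.Tables.Refresh();
                    Assert.IsTrue(db.Tables.Contains(table.Name));
                }
                finally
                {
                    table.Drop();
                }
            });
        }
    }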
+ ///
+ /// Creates a new database and calls the given test method with that database, then drops
+ /// the database after execution if it still exists.
+ ///
+ /// Name prefix for new database
+ /// The test method to execute, with the newly created database passed as a parameter
+ public virtual void ExecuteWithDbDrop(
+ string dbNamePrefix,
+ Action<Database> testMethod)
+ {
+ ExecuteWithDbDrop(dbNamePrefix, dbBackupFile: null, testMethod: testMethod);
+ }
+
+ ///
+ /// Creates a new database and calls the given test method with that database, then drops
+ /// the database after execution if it still exists.
+ ///
+ /// The test method to execute, with the newly created database passed as parameters
+ /// Azure database edition if any
+ public virtual void ExecuteWithDbDrop(
+ Action<Database> testMethod,
+ AzureDatabaseEdition dbAzureDatabaseEdition = AzureDatabaseEdition.NotApplicable)
+ {
+ string dbNamePrefix = string.IsNullOrEmpty(this.TestContext.TestName)
+ ? this.GetType().Name
+ : this.TestContext.TestName;
+ ExecuteWithDbDrop(dbNamePrefix, dbBackupFile: null, testMethod: testMethod, dbAzureDatabaseEdition: dbAzureDatabaseEdition);
+ }
+
+ ///
+ /// Restores a database from a backup file and then executes the specified action method against it for each
+ /// server specified. After execution the database is dropped if it still exists (regardless of the outcome)
+ ///
+ ///
+ /// Name prefix for new database
+ /// Defaults to creating a new database if a null or empty string is given
+ /// The test method to execute, with the newly created database passed as a parameter
+ /// Azure database edition if any
+ /// NOTE : The same backup file is used for ALL servers specified
+ public virtual void ExecuteWithDbDrop(
+ string dbNamePrefix,
+ string dbBackupFile,
+ Action<Database> testMethod,
+ AzureDatabaseEdition dbAzureDatabaseEdition = AzureDatabaseEdition.NotApplicable)
+ {
+ ExecuteWithDbDropImpl(
+ dbNamePrefix: dbNamePrefix,
+ dbAzureDatabaseEdition: dbAzureDatabaseEdition,
+ dbBackupFile: dbBackupFile,
+ createDbSnapshot: false,
+ executeTestMethodMethod: (database) => { testMethod.Invoke(database); });
+ }
+
+ ///
+ /// Restores a database from a backup file OR creates a new database, with a specific Azure db edition if provided,
+ /// then executes the specified action method against it for each server specified. After execution the database
+ /// is dropped if it still exists
+ ///
+ /// Name prefix for new database
+ /// Edition type specific for Azure SQL databases, if "NotApplicable" server default setting is used
+ /// Path of backup file to restore the database, if NULL/EmptyString then create a new database
+ /// The test method to execute, with newly created database passed as parameters
+ /// NOTE : The same backup file is used for ALL servers specified
+ public virtual void ExecuteWithDbDrop(
+ string dbNamePrefix,
+ AzureDatabaseEdition dbAzureEdition,
+ string dbBackupFile,
+ Action<Database> testMethod)
+ {
+ ExecuteWithDbDropImpl(
+ dbNamePrefix: dbNamePrefix,
+ dbAzureDatabaseEdition: dbAzureEdition,
+ dbBackupFile: dbBackupFile,
+ createDbSnapshot: false,
+ executeTestMethodMethod: testMethod);
+ }
+
+ ///
+ /// Restores a database from a backup file OR creates a new database, with a specific Azure db edition if provided,
+ /// then executes the specified action method against it for each server specified. After execution the database
+ /// is dropped if it still exists
+ ///
+ /// Name prefix for new database
+ /// Edition type specific for Azure SQL databases, if "NotApplicable" server default setting is used
+ /// Path of backup file to restore the database, if NULL/EmptyString then create a new database
+ /// Whether to create a snapshot of the DB after creation
+ /// The test method to execute, with newly created database passed as parameters
+ /// NOTE : The same backup file is used for ALL servers specified
+ public virtual void ExecuteWithDbDrop(
+ string dbNamePrefix,
+ AzureDatabaseEdition dbAzureEdition,
+ string dbBackupFile,
+ bool createDbSnapshot,
+ Action<Database> testMethod)
+ {
+ ExecuteWithDbDropImpl(
+ dbNamePrefix: dbNamePrefix,
+ dbAzureDatabaseEdition: dbAzureEdition,
+ dbBackupFile: dbBackupFile,
+ createDbSnapshot: createDbSnapshot,
+ executeTestMethodMethod: testMethod);
+ }
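For comparison, a sketch of the most common ExecuteWithDbDrop pattern: the framework creates a uniquely named database from the prefix, retries transient failures, and drops the database afterwards (the test name and table below are illustrative):

    [TestMethod]
    public void Scripting_smoke_test()
    {
        ExecuteWithDbDrop("ScriptingSmoke", db =>
        {
            // db is created fresh for this test and dropped by the framework when the lambda returns.
            var table = new Table(db, "t1");
            table.Columns.Add(new Column(table, "c1", DataType.Int));
            table.Create();
            Assert.IsTrue(table.Script().Count > 0, "Expected CREATE TABLE script output");
        });
    }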
+
+ ///
+ /// Implementation of the ExecuteWithDbDrop, calls executeTestMethodMethod once for each supported server version
+ ///
+ /// Name prefix for new database
+ /// Edition type specific for Azure SQL databases, if "NotApplicable" server default setting is used
+ /// Path of backup file to restore the database, if NULL/EmptyString then create a new database
+ ///
+ ///
+ /// The action called to invoke the test method, this should simply just call the test method itself with whatever parameters it needs
+ ///
+ private void ExecuteWithDbDropImpl(
+ string dbNamePrefix,
+ AzureDatabaseEdition dbAzureDatabaseEdition,
+ string dbBackupFile,
+ bool createDbSnapshot,
+ Action<Database> executeTestMethodMethod)
+ {
+ var requestedEdition = dbAzureDatabaseEdition;
+ this.ExecuteTestMethodWithFailureRetry(
+ () =>
+ {
+ var originalEdition = requestedEdition;
+ if (requestedEdition == AzureDatabaseEdition.NotApplicable)
+ {
+ // if the default edition specified in the XML for the current server is DW,
+ // pass that along to the helper
+ var desiredEdition = ConnectionHelpers.GetDefaultEdition(TargetServerFriendlyName);
+ if (desiredEdition == DatabaseEngineEdition.SqlDataWarehouse)
+ {
+ requestedEdition = AzureDatabaseEdition.DataWarehouse;
+ }
+ }
+ Database db;
+ try
+ {
+ db = this.ServerContext.CreateDatabaseWithRetry(dbNamePrefix, requestedEdition, dbBackupFile);
+ }
+ finally
+ {
+ requestedEdition = originalEdition;
+ }
+ Database dbSnapshot = createDbSnapshot ? this.ServerContext.CreateDbSnapshotWithRetry(db) : null;
+
+ try
+ {
+ TraceHelper.TraceInformation("Invoking PreExecute for target server {0}", this.ServerContext.Name);
+ PreExecuteTest();
+ TraceHelper.TraceInformation("Invoking test method {0} with target server {1}",
+ this.TestContext.TestName, this.ServerContext.Name);
+ executeTestMethodMethod.Invoke(db);
+ TraceHelper.TraceInformation("Invoking PostExecute for target server {0}",
+ this.ServerContext.Name);
+ PostExecuteTest();
+ }
+ catch (Exception e)
+ {
+ // Add in some more information
+ string message = string.Format(
+ "Test '{0}' failed when targeting server {1}. Message:\n{2}\nStack Trace:\n{3}",
+ this.TestContext.TestName,
+ this.ServerContext.Name,
+ e.BuildRecursiveExceptionMessage(),
+ e.StackTrace);
+ Trace.TraceError(message);
+ throw new InternalTestFailureException(message, e);
+ }
+ finally
+ {
+ // snapshots have to be deleted first
+ if (dbSnapshot != null)
+ {
+ this.ServerContext.DropKillDatabaseNoThrow(dbSnapshot.Name);
+ }
+ this.ServerContext.DropKillDatabaseNoThrow(db.Name);
+ }
+ });
+ }
+
+ ///
+ /// Defines a new database on the specified server, and then executes the specified action method on this database.
+ /// After execution, the database is dropped if it exists.
+ ///
+ /// The server object
+ /// Name prefix for new database
+ /// The test method to execute, with newly created database passed as parameters
+ public void ExecuteMethodWithDbDrop(
+ SMO.Server server,
+ string dbNamePrefix,
+ Action<Database> executeMethod)
+ {
+ string databaseName = SmoObjectHelpers.GenerateUniqueObjectName(dbNamePrefix);
+ Database database;
+ try
+ {
+ TraceHelper.TraceInformation("Creating new database '{0}' on server '{1}'", databaseName, server.Name);
+ database = new Database(server, databaseName);
+ }
+ catch (Exception e)
+ {
+ // Add in some more information
+ throw new InternalTestFailureException(
+ string.Format(
+ "Test setup for Test '{0}' failed when targeting server {1}. Message:\n{2}\nStack Trace:\n{3}",
+ this.TestContext.TestName,
+ server.Name,
+ e.BuildRecursiveExceptionMessage(),
+ e.StackTrace), e);
+ }
+
+ try
+ {
+ TraceHelper.TraceInformation("Invoking test method {0} with target server {1}",
+ this.TestContext.TestName, this.ServerContext.Name);
+ executeMethod.Invoke(database);
+ }
+ catch (Exception e)
+ {
+ // Add in some more information
+ throw new InternalTestFailureException(
+ string.Format("Test '{0}' failed when targeting server {1}. Message:\n{2}\nStack Trace:\n{3}",
+ this.TestContext.TestName,
+ this.ServerContext.Name,
+ e.BuildRecursiveExceptionMessage(),
+ e.StackTrace), e);
+ }
+ finally
+ {
+ server.DropKillDatabaseNoThrow(databaseName);
+ }
+ }
+
+ ///
+ /// Executes the specified test action against multiple servers
+ ///
+ /// The number of required servers to run the test
+ /// A boolean value indicating whether the required servers need to have the same host platform
+ /// A boolean value indicating whether the required servers need to have the same version
+ /// The test action
+ public void ExecuteWithMultipleServers(int numOfServers, bool requiresSameHostPlatform, bool requiresSameMajorVersion, Action<IEnumerable<SMO.Server>> action)
+ {
+ if (numOfServers < 1)
+ {
+ throw new ArgumentException(string.Format("Invalid value provided: {0}", numOfServers), "numOfServers");
+ }
+
+ TraceHelper.TraceInformation("Executing test against multiple servers. numOfServers: {0}, requiresSameHostPlatform, value: {1}, requiresSameMajorVersion: {2}", numOfServers, requiresSameHostPlatform, requiresSameMajorVersion);
+
+ var servers = ConnectionHelpers.GetServerConnections(this.TestMethod, TestContext.SqlTestTargetServersFilter).
+ Select(connection => new SMO.Server(new ServerConnection(new SqlConnection(connection.Value.First().ConnectionString)))).ToArray();
+
+ TraceHelper.TraceInformation("Number of target servers for the test before grouping: {0}", servers.Length);
+
+ var groupResults = servers.GroupBy(server =>
+ {
+ string groupKey = "ServerGroup";
+
+ if (requiresSameHostPlatform)
+ {
+ groupKey += server.HostPlatform;
+ }
+
+ if (requiresSameMajorVersion)
+ {
+ groupKey += server.VersionMajor;
+ }
+
+ return groupKey;
+ }).ToArray();
+
+ TraceHelper.TraceInformation("Number of server groups for the test: {0}", groupResults.Count());
+
+ foreach (var groupResult in groupResults)
+ {
+ if (groupResult.Count() >= numOfServers)
+ {
+ var targetServers = groupResult.Take(numOfServers).ToArray();
+ TraceHelper.TraceInformation("Server group: {0}, target servers: {1}", groupResult.Key, string.Join(",", targetServers.Select(srv => srv.NetNameWithInstance())));
+ this.SqlConnectionStringBuilder = new SqlConnectionStringBuilder(targetServers[0].ConnectionContext.ConnectionString);
+
+ try
+ {
+ PreExecuteTest();
+ action(targetServers);
+ }
+ finally
+ {
+ PostExecuteTest();
+ }
+ }
+ else
+ {
+ Trace.TraceWarning("Server group '{0}' doesn't have enough servers for the test, number of servers in the group: {1}", groupResult.Key, groupResult.Count());
+ }
+ }
+ }
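A sketch of how a test might consume ExecuteWithMultipleServers; the delegate parameter type is reconstructed above as IEnumerable<SMO.Server>, and the scenario itself is purely illustrative:

    [TestMethod]
    public void Cross_server_comparison()
    {
        ExecuteWithMultipleServers(
            numOfServers: 2,
            requiresSameHostPlatform: true,
            requiresSameMajorVersion: true,
            action: servers =>
            {
                var pair = servers.ToArray();
                // Both servers share host platform and major version thanks to the grouping above.
                Assert.AreEqual(pair[0].VersionMajor, pair[1].VersionMajor);
                Assert.AreEqual(pair[0].HostPlatform, pair[1].HostPlatform);
            });
    }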
+
+ ///
+ /// Executes the given test method against the master database in the current server context
+ ///
+ /// The test method to execute, with the master database passed as a parameter
+ public void ExecuteWithMasterDb(Action<Database> testMethod)
+ {
+ ExecuteWithMasterDbImpl(AzureDatabaseEdition.NotApplicable,
+ (database) => { testMethod.Invoke(database); });
+ }
+
+
+ ///
+ /// Implementation of ExecuteWithMasterDb, calls executeTestMethodMethod once for each supported server version
+ ///
+ /// Edition type specific for Azure SQL databases, if "NotApplicable" server default setting is used
+ ///
+ /// The action called to invoke the test method, this should simply just call the test method itself with whatever parameters it needs
+ ///
+ public void ExecuteWithMasterDbImpl(AzureDatabaseEdition edition, Action<Database> executeTestMethodMethod)
+ {
+ this.ExecuteTestMethodWithFailureRetry(
+ () =>
+ {
+ Database database = this.ServerContext.Databases["master"];
+ executeTestMethodMethod(database);
+ });
+ }
+
+ #region Private Helper Methods
+
+ ///
+ /// Executes the specified test method against each applicable server. For each server it will
+ /// first try the "primary" connection, and if that fails will move on to try again with any backup
+ /// servers defined until no more servers are left for that target (in which case the test will be
+ /// marked as a failure) or the method passes successfully (at which point it will move on to the
+ /// next server target)
+ ///
+ ///
+ private void ExecuteTestMethodWithFailureRetry(Action testMethod)
+ {
+ var targetServerExceptions = new LinkedList<Tuple<string, Exception>>();
+ Trace.TraceInformation($"Server filter:{TestContext.Properties["SqlTestTargetServersFilter"]}");
+ var first = true;
+ foreach (
+ KeyValuePair<string, IEnumerable<SqlConnectionStringBuilder>> serverConnection in
+ ConnectionHelpers.GetServerConnections(this.TestMethod, TestContext.SqlTestTargetServersFilter))
+ {
+ // Prevent nunit assert messages from accumulating between server version iterations
+ using (new NUnit.Framework.Internal.TestExecutionContext.IsolatedContext())
+ {
+ try
+ {
+ bool passed = false;
+ var exceptions = new LinkedList<Tuple<string, Exception>>();
+ // some tests target a specific server but use a hard coded friendly name
+ // any test that matches multiple servers isn't doing that.
+ if (!first || TargetServerFriendlyName == null)
+ {
+ TargetServerFriendlyName = serverConnection.Key;
+ }
+
+ first = false;
+ foreach (SqlConnectionStringBuilder conn in serverConnection.Value)
+ {
+ this.SqlConnectionStringBuilder = conn;
+ this.ServerContext =
+ new SMO.Server(
+ new ServerConnection(
+ new SqlConnection(this.SqlConnectionStringBuilder.ConnectionString))
+ {
+ StatementTimeout = 480
+ });
+ try
+ {
+ testMethod.Invoke();
+ passed = true;
+ break; //Test passed successfully so we're done here
+ }
+ catch (Exception e)
+ {
+ exceptions.AddLast(new Tuple<string, Exception>(
+ this.SqlConnectionStringBuilder.DataSource,
+ e));
+ continue;
+ }
+ }
+
+ if (!passed)
+ {
+ //Build up the aggregate exception of all the exceptions we gathered during the run. We put the message and stack trace for each
+ //one into the message itself so it's easily visible from the test failure (AggregateException.ToString doesn't print out that
+ //information by default)
+ throw new AggregateException(
+ String.Format(
+ "Test '{0}' failed against all defined server connections for target server name {1}{2}",
+ this.TestMethod.Name,
+ this.TargetServerFriendlyName,
+ string.Join("\n",
+ exceptions.Select(
+ e =>
+ String.Format("\n******* {0} *******\n{1}\n{2}", e.Item1,
+ e.Item2.Message,
+ e.Item2.StackTrace))))
+ , exceptions.Select(e => e.Item2));
+ }
+ }
+ catch (Exception e)
+ {
+ //We failed against one of our target servers, record it and then move on to the next (we'll fail the test once all
+ //target servers have been run against)
+ targetServerExceptions.AddLast(new Tuple<string, Exception>(this.TargetServerFriendlyName, e));
+ }
+ }
+ }
+
+ //We got some errors against one or more of the target servers, throw an exception with the aggregate information to
+ //fail the test
+ if (targetServerExceptions.Count > 0)
+ {
+ throw new AggregateException(
+ string.Format(
+ "Test '{0}' failed against the following TargetServers : {1}\nExceptions : \n{2}",
+ this.TestMethod.Name,
+ string.Join(",", targetServerExceptions.Select(e => e.Item1)), //List of all failed server friendly names
+ //Formatted exception info for each target server failure
+ string.Join("\n", targetServerExceptions.Select(
+ e =>
+ String.Format(
+@"******* {0} *******
+Message : {1}
+{2}",
+ e.Item1, //ServerName
+ e.Item2.Message,
+ e.Item2.StackTrace)))));
+ }
+ }
+
+ #endregion // Private Helper Methods
+
+ #region Helper Methods
+
+ ///
+ /// Returns TRUE if the test is marked with the attribute,
+ /// indicating that it is a disconnected test (will be run without actually connecting to a server)
+ ///
+ ///
+ protected bool IsDisconnectedTest()
+ {
+ return this.TestMethod.GetCustomAttribute() != null;
+ }
+
+ #endregion //Helper Methods
+ }
+}
diff --git a/src/FunctionalTest/Framework/TestFramework/SqlTestCategoryAttribute.cs b/src/FunctionalTest/Framework/TestFramework/SqlTestCategoryAttribute.cs
new file mode 100644
index 00000000..8c87b637
--- /dev/null
+++ b/src/FunctionalTest/Framework/TestFramework/SqlTestCategoryAttribute.cs
@@ -0,0 +1,66 @@
+// Copyright (c) Microsoft.
+// Licensed under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using Microsoft.VisualStudio.TestTools.UnitTesting;
+
+namespace Microsoft.SqlServer.Test.Manageability.Utils.TestFramework
+{
+ ///
+ /// The different categories of tests. A test can have multiple categories,
+ /// so a separate category should be made for each scenario or grouping of
+ /// tests that exists
+ ///
+ public enum SqlTestCategory
+ {
+ ///
+ /// For tests that should be run as part of a release signoff
+ ///
+ Signoff,
+ ///
+ /// For tests that guard against regressions (possible future ones or ones that previously occurred)
+ ///
+ NoRegression,
+ ///
+ /// Baseline verification tests for SMO
+ ///
+ SmoBaseline,
+ ///
+ /// Tests that are currently in staging - they will not be run as part of "official" test runs
+ /// until they've been validated as being stable
+ ///
+ Staging
+ }
+
+ ///
+ /// Helper attribute to mark test methods with different test categories which can be used to selectively
+ /// group and run tests.
+ ///
+ [AttributeUsage(AttributeTargets.Method)]
+ public class SqlTestCategoryAttribute : TestCategoryBaseAttribute
+ {
+ private IList<SqlTestCategory> _categories;
+
+ ///
+ /// Constructs a new SqlTestCategoryAttribute with the given categories
+ ///
+ ///
+ public SqlTestCategoryAttribute(params SqlTestCategory[] categories)
+ {
+ _categories = categories;
+ }
+
+ ///
+ /// Returns the list of categories associated with this attribute
+ ///
+ public override IList<string> TestCategories
+ {
+ get
+ {
+ return _categories.Select(c => c.ToString()).ToList();
+ }
+ }
+ }
+}
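A hypothetical example of tagging a test with these categories and then filtering a run on them; the test name is invented and the filter line only shows standard vstest syntax for illustration:

    [TestMethod]
    [SqlTestCategory(SqlTestCategory.Signoff, SqlTestCategory.SmoBaseline)]
    public void Baseline_scripting_is_stable()
    {
        // ...
    }

    // e.g. vstest.console.exe SmoTests.dll /TestCaseFilter:"TestCategory=Signoff"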
diff --git a/src/FunctionalTest/Framework/TestFramework/SqlTestDimensionAttribute.cs b/src/FunctionalTest/Framework/TestFramework/SqlTestDimensionAttribute.cs
new file mode 100644
index 00000000..ae0b8ff4
--- /dev/null
+++ b/src/FunctionalTest/Framework/TestFramework/SqlTestDimensionAttribute.cs
@@ -0,0 +1,63 @@
+// Copyright (c) Microsoft.
+// Licensed under the MIT license.
+
+using System;
+using SMO = Microsoft.SqlServer.Management.Smo;
+
+namespace Microsoft.SqlServer.Test.Manageability.Utils.TestFramework
+{
+ ///
+ /// Base attribute class for adding supported server metadata to tests. This allows
+ /// tests to specify what servers they support by using the different implementations
+ /// of this class (such as SupportedServerVersionRange to specify what range of versions
+ /// a test supports)
+ ///
+ public abstract class SqlTestDimensionAttribute : Attribute
+ {
+
+ protected SqlTestDimensionAttribute()
+ {
+
+ }
+
+ ///
+ /// Checks whether the specified server is "supported", that is whether it meets the
+ /// criteria of the dimension that is being defined.
+ ///
+ /// Override this only if you need to use the EngineEdition or TargetServerFriendlyName, otherwise just
+ /// implement IsSupported(Server)
+ ///
+ ///
+ ///
+ ///
+ ///
+ public virtual bool IsSupported(SMO.Server server, TestServerDescriptor serverDescriptor, string targetServerFriendlyName)
+ {
+ //Default is to ignore dbEngineEdition and targetServerFriendlyName since most of the dimensions won't care about those.
+ return IsSupported(server);
+ }
+
+ ///
+ /// Checks whether the specified server is "supported", that is whether it meets the
+ /// criteria of the dimension that is being defined.
+ ///
+ ///
+ ///
+ public abstract bool IsSupported(SMO.Server server);
+
+ }
+
+ ///
+ /// More specialized version of the TestDimension for use by positive dimensions (inclusive
+ /// check, if any of these are true then the server is considered supported)
+ ///
+ public abstract class SqlSupportedDimensionAttribute : SqlTestDimensionAttribute
+ { }
+
+ ///
+ /// More specialized version of the SqlTestDimension for use by negative dimensions
+ /// (exclusive check, if any of these are false then the server is considered NOT supported)
+ ///
+ public abstract class SqlUnsupportedDimensionAttribute : SqlTestDimensionAttribute
+ { }
+}
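A minimal sketch of a custom positive dimension built on SqlSupportedDimensionAttribute; the attribute name and the specific version/platform check are hypothetical, but they use only Server properties already referenced elsewhere in this change:

    [AttributeUsage(AttributeTargets.Method | AttributeTargets.Class, AllowMultiple = true)]
    public class SupportedOnLinuxSql2019PlusAttribute : SqlSupportedDimensionAttribute
    {
        // Supported only for Linux-hosted servers at major version 15 (SQL Server 2019) or later.
        public override bool IsSupported(SMO.Server server)
        {
            return string.Equals(server.HostPlatform, "Linux", StringComparison.OrdinalIgnoreCase)
                && server.VersionMajor >= 15;
        }
    }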
diff --git a/src/FunctionalTest/Framework/TestFramework/SqlTestHelpers.cs b/src/FunctionalTest/Framework/TestFramework/SqlTestHelpers.cs
new file mode 100644
index 00000000..16368a65
--- /dev/null
+++ b/src/FunctionalTest/Framework/TestFramework/SqlTestHelpers.cs
@@ -0,0 +1,139 @@
+// Copyright (c) Microsoft.
+// Licensed under the MIT license.
+
+using System.Data;
+#if MICROSOFTDATA
+using Microsoft.Data.SqlClient;
+#else
+using System.Data.SqlClient;
+#endif
+using System.Reflection;
+
+namespace Microsoft.SqlServer.Test.Manageability.Utils.TestFramework
+{
+ ///
+ /// General helper method/properties for use with the SQL Manageability Test Framework
+ ///
+ public static class SqlTestHelpers
+ {
+ ///
+ /// Helper function to test the value of a specified property
+ ///
+ /// The object containing the property to be tested
+ /// The name of the property to test
+ /// The expected value of the property
+ ///
+ public static SqlTestResult TestReadProperty(object obj, string propertyName, object expectedValue)
+ {
+ var result = new SqlTestResult();
+
+ if (obj == null)
+ {
+ result.AddFailures(
+ string.Format("Object is null! Property Name = '{0}' ExpectedValue = '{1}'",
+ propertyName,
+ expectedValue));
+ return result;
+ }
+
+ PropertyInfo pi = obj.GetType().GetProperty(propertyName);
+ if (pi == null)
+ {
+ result.AddFailures(
+ string.Format("Object of type '{0}' did not have expected property '{1}'",
+ obj.GetType(),
+ propertyName));
+ return result;
+ }
+
+ object actualValue = pi.GetValue(obj, null);
+
+ //One or both may be null so we use object.Equals
+ if (object.Equals(actualValue, expectedValue) == false)
+ {
+ result.AddFailures(
+ string.Format(
+ "Property {0} was expected to be '{1}' (type '{2}') but actual value was '{3}' (type '{4}')",
+ propertyName,
+ expectedValue ?? "null",
+ expectedValue == null ? "null" : expectedValue.GetType().ToString(),
+ actualValue ?? "null",
+ actualValue == null ? "null" : actualValue.GetType().ToString()));
+ }
+ return result;
+
+ }
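A short sketch of how a test body might combine TestReadProperty results, assuming db is an SMO Database available in the surrounding test; ReadOnly and AutoClose are standard Database properties and the expected values are illustrative:

    SqlTestResult result = SqlTestHelpers.TestReadProperty(db, "ReadOnly", false)
                         & SqlTestHelpers.TestReadProperty(db, "AutoClose", false);
    Assert.IsTrue(result.Succeeded, result.FailureReasons);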
+
+ public static void CleanupOldDbs(SqlConnection sqlConnection)
+ {
+ bool wasOpen = sqlConnection.State == ConnectionState.Open;
+ if (!wasOpen)
+ {
+ sqlConnection.Open();
+ }
+ try
+ {
+
+ using (var sqlCommand = sqlConnection.CreateCommand())
+ {
+ sqlCommand.CommandTimeout = 60;
+ sqlCommand.CommandText = @"declare @dbname sysname
+declare @createDate datetime
+select @createDate = getdate()
+
+declare dbc cursor for
+ select dbs.name
+ from
+ sys.databases dbs
+ where
+ dbs.database_id > 4
+ and dbs.name not like N'keep%'
+ and dbs.name not like N'Keep%'
+ and dbs.name not like N'LanguageSemantics'
+ and dbs.name not in ('DWConfiguration', 'DWDiagnostics', 'DWQueue') --Required for Polybase
+ --and dbs.is_read_only = 0
+ --and dbs.user_access = 0
+ and dbs.create_date < dateadd(hh, -1, @createDate)
+
+
+open dbc
+fetch next from dbc
+ into @dbname
+
+while @@FETCH_STATUS = 0
+begin
+
+ begin try
+ -- delete the db
+ print @dbname
+ declare @esql nvarchar(512)
+ select @esql = N'drop database ' + QUOTENAME(@dbname)
+ exec sp_executesql @esql
+ end try
+ begin catch
+ print 'error ' + @dbname
+ end catch
+
+ fetch next from dbc
+ into @dbname
+end
+
+close dbc
+deallocate dbc";
+ sqlCommand.CommandType = CommandType.Text;
+ sqlCommand.ExecuteNonQuery();
+ }
+ }
+ catch (SqlException)
+ {
+ }
+ finally
+ {
+ if (!wasOpen)
+ {
+ sqlConnection.Close();
+ }
+ }
+ }
+ }
+}
diff --git a/src/FunctionalTest/Framework/TestFramework/SqlTestResult.cs b/src/FunctionalTest/Framework/TestFramework/SqlTestResult.cs
new file mode 100644
index 00000000..fc523260
--- /dev/null
+++ b/src/FunctionalTest/Framework/TestFramework/SqlTestResult.cs
@@ -0,0 +1,105 @@
+// Copyright (c) Microsoft.
+// Licensed under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+
+namespace Microsoft.SqlServer.Test.Manageability.Utils.TestFramework
+{
+ ///
+ /// Keeps track of the result of tests
+ ///
+ public class SqlTestResult
+ {
+ ///
+ /// Whether the test succeeded, having any failures added will cause
+ /// this to return FALSE
+ ///
+ private bool failed;
+ public bool Succeeded
+ {
+ get { return failureReasons.Count == 0 && !failed; }
+ }
+ private IList<string> failureReasons = new List<string>();
+
+ ///
+ /// Adds a new failure string to the list of failure reasons for this result. This string will be
+ /// formatted with the specified options.
+ ///
+ /// Format string to add
+ /// Args to pass to the format string
+ public void AddFailure(string formatString, params object[] args)
+ {
+ this.AddFailure(string.Format(formatString, args));
+ }
+
+ ///
+ /// Adds a new failure string to the list of failure reasons for this result.
+ ///
+ ///
+ public void AddFailure(string failureReason)
+ {
+ this.failureReasons.Add(failureReason);
+ }
+ ///
+ /// Adds specified failure reasons to the list of failure reasons for this result
+ ///
+ ///
+ public void AddFailures(params string[] failureReasons)
+ {
+ foreach (string failureReason in failureReasons)
+ {
+ this.AddFailure(failureReason);
+ }
+ }
+
+ ///
+ /// Invokes the given action and logs any exception message as a failure
+ ///
+ ///
+ public Exception HandleException(Action a)
+ {
+ try
+ {
+ a();
+ }
+ catch (Exception e)
+ {
+ this.AddFailure(e.Message);
+ return e;
+ }
+ return null;
+ }
+
+ ///
+ /// Operator for merging two TestResults together. Will & their respective
+ /// result values and concat their list of failure reasons.
+ ///
+ ///
+ ///
+ ///
+ public static SqlTestResult operator &(SqlTestResult left, SqlTestResult right)
+ {
+ var ret = new SqlTestResult()
+ {
+ failureReasons = new List<string>(left.failureReasons).Concat(right.failureReasons).ToList(),
+ failed = left.failed || right.failed
+ };
+ return ret;
+ }
+
+ ///
+ /// A string containing all the failure reasons given, with each reason being on a separate line.
+ ///
+ public string FailureReasons
+ {
+ get
+ {
+ return this.failureReasons.Count > 0 ?
+ "Failure Reasons : " + Environment.NewLine + string.Join(Environment.NewLine, failureReasons) :
+ String.Empty;
+ }
+ }
+ }
+}
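A sketch of accumulating non-fatal verification failures with SqlTestResult inside a test body before asserting once at the end; table and expectedRowCount are assumed to exist in the surrounding test:

    var result = new SqlTestResult();
    // HandleException records the exception message as a failure instead of letting it escape.
    result.HandleException(() => table.Refresh());
    if (table.RowCount != expectedRowCount)
    {
        result.AddFailure("Expected {0} rows but found {1}", expectedRowCount, table.RowCount);
    }
    Assert.IsTrue(result.Succeeded, result.FailureReasons);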
diff --git a/src/FunctionalTest/Framework/TestFramework/SupportedServerVersionRange.cs b/src/FunctionalTest/Framework/TestFramework/SupportedServerVersionRange.cs
new file mode 100644
index 00000000..c6d47834
--- /dev/null
+++ b/src/FunctionalTest/Framework/TestFramework/SupportedServerVersionRange.cs
@@ -0,0 +1,226 @@
+// Copyright (c) Microsoft.
+// Licensed under the MIT license.
+
+using System;
+using Microsoft.SqlServer.Management.Common;
+using SMO = Microsoft.SqlServer.Management.Smo;
+
+namespace Microsoft.SqlServer.Test.Manageability.Utils.TestFramework
+{
+ ///
+ /// Attribute to mark a test method with the specific server versions it supports. It does this
+ /// through the various Min and Max properties, which combine to make two versions
+ /// (MinMajor.MinMinor.MinBuild.MinRevision and MaxMajor.MaxMinor.MaxBuild.MaxRevision). As long
+ /// as the version for a server is between (inclusive) these two versions that server is considered
+ /// supported.
+ ///
+ /// The MinVersion has a default of 0.0.0.0 and the MaxVersion has a default of IntMax.IntMax.IntMax.IntMax.
+ ///
+ /// Any combination of the version parts can be overridden as desired. So for example if you want to run
+ /// a test against all SQL2016 servers you would specify the attribute like this:
+ ///
+ /// [SupportedServerVersionRange(MinMajor = 13, MaxMajor = 13)]
+ ///
+ /// Or if a feature was added in a specific build you could do something like this, which would run
+ /// the test only if the Major part was 13 and the Build part was >= 300.
+ ///
+ /// [SupportedServerVersionRange(MinMajor = 13, MinBuild = 300, MaxMajor = 13)]
+ ///
+ /// There is also support for specifying that a range only applies to a certain engine type/edition.
+ ///
+ /// [SupportedServerVersionRange(DatabaseEngineType = DatabaseEngineType.Standalone, MinMajor = 13)]
+ /// [SupportedServerVersionRange(DatabaseEngineType = DatabaseEngineType.SqlAzureDatabase, MinMajor = 12)]
+ ///
+ /// These attributes on a test method mean a server is considered supported if the EngineType is Standalone and the
+ /// MajorVersion is >= 13, OR the EngineType is Azure and the MajorVersion is >= 12.
+ ///
+ /// Note that by default not using this attribute will mean ALL server versions are valid
+ [AttributeUsage(AttributeTargets.Method | AttributeTargets.Class, AllowMultiple = true)]
+ public class SupportedServerVersionRangeAttribute : SqlSupportedDimensionAttribute
+ {
+ //Attributes only allow constant values as parameters so we
+ //need to have each value be exposed as its own property
+ private int _minmajor = 0, _maxMajor = Int32.MaxValue;
+ private int _minMinor = 0, _maxMinor = Int32.MaxValue;
+ private int _minBuild = 0, _maxBuild = Int32.MaxValue;
+ private int _minRevision = 0, _maxRevision = Int32.MaxValue;
+ private DatabaseEngineType _engineType = DatabaseEngineType.Unknown;
+ private DatabaseEngineEdition _engineEdition = DatabaseEngineEdition.Unknown;
+
+ ///
+ /// Constructs a SupportedServerVersionRangeAttribute with default settings to match every server
+ ///
+ public SupportedServerVersionRangeAttribute()
+ {
+ HostPlatform = null;
+ }
+
+ ///
+ /// The Major (1st) part of the MinVersion version
+ ///
+ public int MinMajor
+ {
+ get { return _minmajor; }
+ set { _minmajor = value; }
+ }
+
+ ///
+ /// The Major (1st) part of the MaxVersion version
+ ///
+ public int MaxMajor
+ {
+ get { return _maxMajor; }
+ set { _maxMajor = value; }
+ }
+
+ ///
+ /// The Minor (2nd) part of the MinVersion version
+ ///
+ public int MinMinor
+ {
+ get { return _minMinor; }
+ set { _minMinor = value; }
+ }
+
+ ///
+ /// The Minor (2nd) part of the MaxVersion version
+ ///
+ public int MaxMinor
+ {
+ get { return _maxMinor; }
+ set { _maxMinor = value; }
+ }
+
+ ///
+ /// The Build (3rd) part of the MinVersion version
+ ///
+ public int MinBuild
+ {
+ get { return _minBuild; }
+ set { _minBuild = value; }
+ }
+
+ ///
+ /// The Build (3rd) part of the MaxVersion version
+ ///
+ public int MaxBuild
+ {
+ get { return _maxBuild; }
+ set { _maxBuild = value; }
+ }
+
+ ///
+ /// The Revision (4th) part of the MinVersion version
+ ///
+ public int MinRevision
+ {
+ get { return _minRevision; }
+ set { _minRevision = value; }
+ }
+
+ ///
+ /// The Revision (4th) part of the MaxVersion version
+ ///
+ public int MaxRevision
+ {
+ get { return _maxRevision; }
+ set { _maxRevision = value; }
+ }
+
+ ///
+ /// The DatabaseEngineType this version range applies to
+ ///
+ public DatabaseEngineType DatabaseEngineType
+ {
+ get { return _engineType; }
+ set { _engineType = value; }
+ }
+
+ ///
+ /// The server DatabaseEngineEdition this version range applies to
+ ///
+ public DatabaseEngineEdition Edition
+ {
+ get { return _engineEdition; }
+ set { _engineEdition = value; }
+ }
+
+ ///
+ /// Platform supported. Null if all platforms are supported
+ ///
+ public string HostPlatform { get; set; }
+
+ ///
+ /// Checks that the EngineEdition matches before calling IsSupported(Server)
+ ///
+ ///
+ ///
+ ///
+ ///
+ public override bool IsSupported(SMO.Server server, TestServerDescriptor serverDescriptor, string targetServerFriendlyName)
+ {
+ //Special case for Azure Datawarehouse - if the server we're checking is
+ //specifically a DW server then it's only supported if the test specifically
+ //says it supports Datawarehouse. This is because most tests aren't actually
+ //creating a datawarehouse DB so there's no reason to run them there (it'll
+ //just be a normal Azure DB, which we already have another server covering)
+
+ //Otherwise if the attribute was set to a specific edition and the editions
+ //don't match return false (not supported)
+ if (
+ (serverDescriptor.DatabaseEngineEdition == DatabaseEngineEdition.SqlDataWarehouse || this.Edition != DatabaseEngineEdition.Unknown) &&
+ this.Edition != serverDescriptor.DatabaseEngineEdition)
+ {
+ return false;
+ }
+
+ // if the xml provides the MajorVersion we can avoid a server query
+ if (serverDescriptor.MajorVersion > 0 &&
+ (MaxMajor < serverDescriptor.MajorVersion || MinMajor > serverDescriptor.MajorVersion))
+ {
+ return false;
+ }
+
+ if (!string.IsNullOrEmpty(HostPlatform) && !string.IsNullOrEmpty(serverDescriptor.HostPlatform))
+ {
+ if (string.Compare(HostPlatform, serverDescriptor.HostPlatform, StringComparison.OrdinalIgnoreCase) != 0)
+ {
+ return false;
+ }
+ }
+
+ if (this.DatabaseEngineType != DatabaseEngineType.Unknown && serverDescriptor.DatabaseEngineType != this.DatabaseEngineType)
+ {
+ return false;
+ }
+ return IsSupported(server);
+ }
+
+ ///
+ /// The server is supported if server.Version >= MinVersion and server.Version <= MaxVersion.
+ /// By default the MinVersion is 0.0.0.0 and MaxVersion is IntMax.IntMax.IntMax.IntMax (which
+ /// all versions will return true for)
+ ///
+ ///
+ ///
+ public override bool IsSupported(SMO.Server server)
+ {
+ if (this.DatabaseEngineType != DatabaseEngineType.Unknown && this.DatabaseEngineType != server.ServerType)
+ {
+ return false;
+ }
+ if (!string.IsNullOrEmpty(HostPlatform) &&
+ string.Compare(HostPlatform, server.HostPlatform, StringComparison.InvariantCultureIgnoreCase) != 0)
+ {
+ return false;
+ }
+ Version serverVersion = server.Version;
+ Version minVersion = new Version(MinMajor, MinMinor, MinBuild, MinRevision);
+ Version maxVersion = new Version(MaxMajor, MaxMinor, MaxBuild, MaxRevision);
+
+ return serverVersion >= minVersion && serverVersion <= maxVersion;
+ }
+
+ }
+}
diff --git a/src/FunctionalTest/Framework/TestFramework/SupportedTargetServerFriendlyName.cs b/src/FunctionalTest/Framework/TestFramework/SupportedTargetServerFriendlyName.cs
new file mode 100644
index 00000000..41482336
--- /dev/null
+++ b/src/FunctionalTest/Framework/TestFramework/SupportedTargetServerFriendlyName.cs
@@ -0,0 +1,47 @@
+// Copyright (c) Microsoft.
+// Licensed under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using SMO = Microsoft.SqlServer.Management.Smo;
+
+namespace Microsoft.SqlServer.Test.Manageability.Utils.TestFramework
+{
+
+ ///
+ /// Checks whether the specified TargetServerFriendlyName (name attribute from ConnectionInfo.xml)
+ /// matches one of the ones specified.
+ ///
+ [AttributeUsage(AttributeTargets.Method | AttributeTargets.Class, AllowMultiple = true)]
+ public class SupportedTargetServerFriendlyNameAttribute : SqlSupportedDimensionAttribute
+ {
+ private readonly ISet<string> _targetServerFriendlyNames;
+
+ public SupportedTargetServerFriendlyNameAttribute(params string[] targetServerFriendlyNames)
+ {
+ this._targetServerFriendlyNames = new HashSet<string>(targetServerFriendlyNames.Distinct());
+ }
+
+ ///
+ /// The server is supported if the friendly name for the target server (defined in ConnectionInfo.xml)
+ /// matches one of the ones we support
+ ///
+ ///
+ ///
+ ///
+ ///
+ public override bool IsSupported(SMO.Server server, TestServerDescriptor serverDescriptor, string targetServerFriendlyName)
+ {
+ return _targetServerFriendlyNames.Contains(targetServerFriendlyName);
+ }
+
+ public override bool IsSupported(SMO.Server server)
+ {
+ //This attribute should only ever be called through the override that passes in the targetServerFriendly name, if not
+ //we should fail out since that's a test framework error that it got to that state
+ throw new InvalidOperationException("The SupportedTargetServerFriendlyName attribute should never have IsSupported called without a TargetServerFriendlyName passed in");
+ }
+
+ }
+}
diff --git a/src/FunctionalTest/Framework/TestFramework/TestServerDescriptor.cs b/src/FunctionalTest/Framework/TestFramework/TestServerDescriptor.cs
new file mode 100644
index 00000000..8159e445
--- /dev/null
+++ b/src/FunctionalTest/Framework/TestFramework/TestServerDescriptor.cs
@@ -0,0 +1,203 @@
+// Copyright (c) Microsoft.
+// Licensed under the MIT license.
+
+using System;
+using System.Collections.Generic;
+#if MICROSOFTDATA
+using Microsoft.Data.SqlClient;
+#else
+using System.Data.SqlClient;
+#endif
+using System.Diagnostics;
+using System.Linq;
+using System.Xml.Linq;
+using System.Xml.XPath;
+using Microsoft.SqlServer.Management.Common;
+using Microsoft.SqlServer.Test.Manageability.Utils.Helpers;
+
+namespace Microsoft.SqlServer.Test.Manageability.Utils.TestFramework
+{
+ ///
+ /// Data about a server used for Data Tools test runs
+ ///
+ [DebuggerDisplay("{Name}")]
+ public class TestServerDescriptor
+ {
+ ///
+ /// Connection string for the connection
+ ///
+ public string ConnectionString { get; set; }
+
+ ///
+ /// Backup connection string to use in case of a failure while running
+ /// the test using the first connection string
+ ///
+ public IEnumerable<string> BackupConnnectionStrings { get; set; }
+
+ ///
+ /// Iterates through all the connection strings defined for this server target, starting with the
+ /// primary connection strings and then all of the defined backup connection strings.
+ ///
+ public IEnumerable<string> AllConnectionStrings
+ {
+ get
+ {
+ yield return ConnectionString;
+
+ foreach (string connString in BackupConnnectionStrings)
+ {
+ yield return connString;
+ }
+ }
+ }
+
+ ///
+ /// Expected DatabaseEngineType
+ ///
+ public DatabaseEngineType DatabaseEngineType { get; set; }
+
+ ///
+ /// Enabled features on the server
+ ///
+ public IEnumerable<SqlFeature> EnabledFeatures { get; set; }
+
+ ///
+ /// The features that the server is reserved for.
+ ///
+ public IEnumerable<SqlFeature> ReservedFor { get; set; }
+
+ ///
+ /// Expected HostPlatform
+ ///
+ public string HostPlatform { get; set; }
+
+ ///
+ /// Name used to identify the server in configuration
+ ///
+ public string Name { get; set; }
+
+ ///
+ /// Expected DatabaseEngineEdition. Will be Unknown if not provided in the XML
+ ///
+ public DatabaseEngineEdition DatabaseEngineEdition { get; set; }
+
+ ///
+ /// Major version number, eg 13 for SQL2016. 0 if not specified
+ ///
+ public int MajorVersion { get; set; }
+
+ ///
+ /// Returns the set of server connection strings allotted for the current test.
+ ///
+ ///
+ public static IEnumerable<TestServerDescriptor> GetServerDescriptors(XDocument connStringsDoc, AzureKeyVaultHelper azureKeyVaultHelper)
+ {
+ string targetServersEnvVar =
+ Environment.GetEnvironmentVariable("SqlTestTargetServersFilter", EnvironmentVariableTarget.Process) ??
+ Environment.GetEnvironmentVariable("SqlTestTargetServersFilter", EnvironmentVariableTarget.User) ??
+ Environment.GetEnvironmentVariable("SqlTestTargetServersFilter", EnvironmentVariableTarget.Machine);
+
+ var targetServers =
+ !string.IsNullOrWhiteSpace(targetServersEnvVar) ? new HashSet<string>(
+ targetServersEnvVar.Split(';').Select(l => l.Trim()),
+ StringComparer.OrdinalIgnoreCase)
+ : null;
+
+ if (targetServers != null)
+ {
+ TraceHelper.TraceInformation("Limiting tests to these servers based on environment: {0}", targetServersEnvVar);
+ }
+
+ return
+ connStringsDoc.XPathSelectElements(@"//ConnectionString")
+ .Select(connStringElement => new TestServerDescriptor
+ {
+ ConnectionString = GetConnectionString(connStringElement, azureKeyVaultHelper),
+ BackupConnnectionStrings = new List<string>(new[] { connStringElement.GetStringAttribute("backupConnectionString") }.Where(c => !string.IsNullOrEmpty(c))),
+ Name = connStringElement.GetStringAttribute("name"),
+ HostPlatform = connStringElement.GetStringAttribute("hostplatform"),
+ DatabaseEngineType = connStringElement.GetAttribute("databaseenginetype",
+ (s) => (DatabaseEngineType)Enum.Parse(typeof(DatabaseEngineType), s)),
+ DatabaseEngineEdition =
+ connStringElement.GetAttribute("db_engine_edition",
+ (s) =>
+ s == null
+ ? DatabaseEngineEdition.Unknown
+ : (DatabaseEngineEdition)Enum.Parse(typeof(DatabaseEngineEdition), s)),
+ EnabledFeatures = connStringElement.GetAttribute("enabled_features", GetFeaturesFromString),
+ MajorVersion = connStringElement.GetAttribute("majorversion",
+ (s) => string.IsNullOrEmpty(s) ? 0 : int.Parse(s)),
+ ReservedFor = connStringElement.GetAttribute("reserved_for", GetFeaturesFromString)
+ }).Where((d) => targetServers == null || targetServers.Contains(d.Name)).ToList();
+
+ }
+
+ private static string GetConnectionString(XElement connStringElement, AzureKeyVaultHelper azureKeyVaultHelper)
+ {
+ var baseString = connStringElement.GetStringAttribute("connectionString");
+ var connStringBuilder = new SqlConnectionStringBuilder(baseString);
+ var credentialName = connStringElement.GetStringAttribute("passwordCredential");
+
+ // Fall back to SQL auth on Linux test hosts
+ if (Environment.OSVersion.Platform != PlatformID.Win32NT && connStringBuilder.IntegratedSecurity)
+ {
+ connStringBuilder.IntegratedSecurity = false;
+ if (string.IsNullOrEmpty(connStringBuilder.UserID))
+ {
+ connStringBuilder.UserID = "sa";
+ }
+ }
+
+ if (!connStringBuilder.IntegratedSecurity && !string.IsNullOrEmpty(connStringBuilder.UserID) &&
+ credentialName != null)
+ {
+ if (azureKeyVaultHelper == null)
+ {
+ throw new InvalidOperationException("AzureKeyVaultHelper must be provided to fetch passwords");
+ }
+ connStringBuilder.Password = azureKeyVaultHelper.GetDecryptedSecret(credentialName);
+ }
+
+ return connStringBuilder.ConnectionString;
+ }
+
+
+
+ private static IEnumerable<SqlFeature> GetFeaturesFromString(string source)
+ {
+ if (string.IsNullOrWhiteSpace(source))
+ {
+ return new SqlFeature[] { };
+ }
+
+ string[] features = source.Split(new char[] { ',' }, StringSplitOptions.RemoveEmptyEntries);
+ return features.Select(feature => (SqlFeature)Enum.Parse(typeof(SqlFeature), feature)).ToArray();
+ }
+ }
+
+ static class XElementExtensions
+ {
+ internal static string GetStringAttribute(this XElement xElement, string attrName)
+ {
+ return xElement.GetAttribute(attrName, (s) => s);
+ }
+
+ internal static T GetAttribute<T>(this XElement xElement, string attrName, Func<string, T> converter = null)
+ {
+ XAttribute attr = xElement.Attribute(attrName);
+ if (attr == null && converter == null)
+ {
+ throw new InvalidOperationException(
+ string.Format("ConnectionString node {0} missing {1} attribute",
+ xElement, attrName));
+ }
+
+ return converter == null ? DefaultConvert<T>(attr.Value) : converter(attr == null ? null : attr.Value);
+ }
+
+ static T DefaultConvert<T>(string s)
+ {
+ return (T) Convert.ChangeType(s, typeof (T));
+ }
+ }
+}
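For reference, a hypothetical ConnectionInfo.xml entry shaped the way GetServerDescriptors and GetConnectionString read it above, built here as an XElement purely to show the attribute names the parsing code expects; every value is illustrative:

    var sample = new XElement("ConnectionString",
        new XAttribute("name", "Sql2019Linux"),
        new XAttribute("connectionString", "Data Source=myserver;Integrated Security=true"),
        new XAttribute("backupConnectionString", "Data Source=mybackupserver;Integrated Security=true"),
        new XAttribute("databaseenginetype", "Standalone"),
        new XAttribute("db_engine_edition", "Enterprise"),
        new XAttribute("hostplatform", "Linux"),
        new XAttribute("majorversion", "15"),
        new XAttribute("enabled_features", ""),
        new XAttribute("passwordCredential", "MyVaultSecretName"));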
diff --git a/src/FunctionalTest/Framework/TestFramework/TestServerPoolManager.cs b/src/FunctionalTest/Framework/TestFramework/TestServerPoolManager.cs
new file mode 100644
index 00000000..f5d08fed
--- /dev/null
+++ b/src/FunctionalTest/Framework/TestFramework/TestServerPoolManager.cs
@@ -0,0 +1,95 @@
+// Copyright (c) Microsoft.
+// Licensed under the MIT license.
+
+using System;
+using System.Collections.Concurrent;
+using System.Collections.Generic;
+using Microsoft.SqlServer.Management.Smo;
+
+using SMO = Microsoft.SqlServer.Management.Smo;
+
+namespace Microsoft.SqlServer.Test.Manageability.Utils
+{
+ ///
+ /// Manager for pools of databases that can be reused by multiple tests so they don't have to recreate a database for each test.
+ ///
+ /// Note that tests using this need to be able to handle the database not being in a perfectly clean state - other tests may leave
+ /// the database in an unclean state.
+ ///
+ /// Because of this tests that use a database from a pool should strive to either clean up their resources created/modified during the test
+ /// or to only make changes that are unlikely to affect other tests.
+ ///
+ public static class TestServerPoolManager
+ {
+ public const string DEFAULT_POOL_NAME = "DEFAULT";
+
+ ///
+ /// DB pools for tests to share and reuse. Note this is shared across all tests that inherit from this test base.
+ /// Key - Pool Name
+ /// Key - ServerName
+ /// Value - The pooled database for that server
+ ///
+ [ThreadStatic]
+ private static IDictionary<string, IDictionary<string, Database>> databasePools;
+
+ private static IDictionary<string, IDictionary<string, Database>> DatabasePools =>
+ databasePools ?? (databasePools = new Dictionary<string, IDictionary<string, Database>>());
+
+ private static ConcurrentBag<Database> allDatabases = new ConcurrentBag<Database>();
+ static TestServerPoolManager()
+ {
+ // Note we can't use the AssemblyCleanup attribute because that only runs on classes marked as
+ // [TestClass] that have actual tests in them. Since this doesn't contain actual tests we
+ // instead use this event so that the cleanup will still happen without requiring hooking an
+ // AssemblyCleanup into the tests themselves.
+ AppDomain.CurrentDomain.DomainUnload += (_,__) => Cleanup();
+ }
+
+ ///
+ /// Gets a database for the specified server from the specified pool
+ ///
+ /// The name of the pool
+ /// The server to get the database from
+ ///
+ public static Database GetDbFromPool(string poolName, SMO.Server server)
+ {
+ if (!DatabasePools.ContainsKey(poolName))
+ {
+ DatabasePools[poolName] = new Dictionary<string, Database>();
+ }
+
+ var pool = DatabasePools[poolName];
+ if (!pool.ContainsKey(server.Name))
+ {
+ pool[server.Name] = server.CreateDatabaseWithRetry();
+ allDatabases.Add(pool[server.Name]);
+ }
+
+ return pool[server.Name];
+ }
+
+ ///
+ /// Gets a database for the specified server from the default pool
+ ///
+ /// The server to get the database for
+ ///
+ public static Database GetDbFromPool(SMO.Server server)
+ {
+ return GetDbFromPool(DEFAULT_POOL_NAME, server);
+ }
+
+ ///
+ /// Cleans up the manager by deleting all the databases in the pools.
+ ///
+ private static void Cleanup()
+ {
+ // Clean up all the DBs in the pools
+ foreach(var database in allDatabases)
+ {
+ database.Parent.DropKillDatabaseNoThrow(database.Name);
+ }
+ }
+ }
+}
+
+
diff --git a/src/FunctionalTest/Framework/TestFramework/UnsupportedDatabaseEngineEditionAttribute.cs b/src/FunctionalTest/Framework/TestFramework/UnsupportedDatabaseEngineEditionAttribute.cs
new file mode 100644
index 00000000..e7814694
--- /dev/null
+++ b/src/FunctionalTest/Framework/TestFramework/UnsupportedDatabaseEngineEditionAttribute.cs
@@ -0,0 +1,59 @@
+// Copyright (c) Microsoft.
+// Licensed under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using Microsoft.SqlServer.Management.Common;
+using SMO = Microsoft.SqlServer.Management.Smo;
+
+namespace Microsoft.SqlServer.Test.Manageability.Utils.TestFramework
+{
+ ///
+ /// Attribute to mark a test method with DatabaseEngineEdition values that it doesn't
+ /// support.
+ ///
+ /// Note this means that by default a test will run against all servers
+ /// regardless of their DatabaseEngineEdition
+ [AttributeUsage(AttributeTargets.Method | AttributeTargets.Class, AllowMultiple = false)]
+ public class UnsupportedDatabaseEngineEditionAttribute : SqlUnsupportedDimensionAttribute
+ {
+
+ private readonly ISet<DatabaseEngineEdition> _unsupportedDatabaseEngineEditions;
+
+ ///
+ ///
+ ///
+ ///
+ public UnsupportedDatabaseEngineEditionAttribute(params DatabaseEngineEdition[] unsupportedDatabaseEngineEditions)
+ {
+ _unsupportedDatabaseEngineEditions = new HashSet<DatabaseEngineEdition>(unsupportedDatabaseEngineEditions);
+ }
+
+ ///
+ /// Bypass the server query if the descriptor has a valid edition
+ ///
+ ///
+ ///
+ ///
+ ///
+ public override bool IsSupported(SMO.Server server, TestServerDescriptor serverDescriptor, string targetServerFriendlyName)
+ {
+ if (serverDescriptor.DatabaseEngineEdition == DatabaseEngineEdition.Unknown)
+ {
+ return IsSupported(server);
+ }
+ return !_unsupportedDatabaseEngineEditions.Contains(serverDescriptor.DatabaseEngineEdition);
+ }
+
+ ///
+ /// Returns true if the server is supported, which in this case means that server.DatabaseEngineEdition is
+ /// NOT one of the unsupported values.
+ ///
+ ///
+ ///
+ public override bool IsSupported(SMO.Server server)
+ {
+ return _unsupportedDatabaseEngineEditions.Contains(server.DatabaseEngineEdition) == false;
+ }
+ }
+}
diff --git a/src/FunctionalTest/Framework/TestFramework/UnsupportedDatabaseEngineTypeAttribute.cs b/src/FunctionalTest/Framework/TestFramework/UnsupportedDatabaseEngineTypeAttribute.cs
new file mode 100644
index 00000000..99010d4d
--- /dev/null
+++ b/src/FunctionalTest/Framework/TestFramework/UnsupportedDatabaseEngineTypeAttribute.cs
@@ -0,0 +1,45 @@
+// Copyright (c) Microsoft.
+// Licensed under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using Microsoft.SqlServer.Management.Common;
+using SMO = Microsoft.SqlServer.Management.Smo;
+
+namespace Microsoft.SqlServer.Test.Manageability.Utils.TestFramework
+{
+ ///
+ /// Attribute to mark a test method with DatabaseEngineType values that it doesn't
+ /// support.
+ ///
+ /// Note this means that by default a test will run against all servers
+ /// regardless of their DatabaseEngineType
+ [AttributeUsage(AttributeTargets.Method | AttributeTargets.Class, AllowMultiple = false)]
+ public class UnsupportedDatabaseEngineTypeAttribute : SqlUnsupportedDimensionAttribute
+ {
+
+ private readonly ISet<DatabaseEngineType> _unsupportedDatabaseEngineTypes;
+
+ public UnsupportedDatabaseEngineTypeAttribute(params DatabaseEngineType[] unsupportedDatabaseEngineTypes)
+ {
+ _unsupportedDatabaseEngineTypes = new HashSet<DatabaseEngineType>(unsupportedDatabaseEngineTypes);
+ }
+
+
+ public override bool IsSupported(SMO.Server server, TestServerDescriptor serverDescriptor, string targetServerFriendlyName)
+ {
+ return _unsupportedDatabaseEngineTypes.Contains(serverDescriptor.DatabaseEngineType) == false;
+ }
+
+ ///
+ /// Returns true if the server is supported, which in this case means that server.DatabaseEngineType is
+ /// NOT one of the unsupported values.
+ ///
+ ///
+ ///
+ public override bool IsSupported(SMO.Server server)
+ {
+ return _unsupportedDatabaseEngineTypes.Contains(server.DatabaseEngineType) == false;
+ }
+ }
+}
diff --git a/src/FunctionalTest/Framework/TestFramework/UnsupportedHostPlatformAttribute.cs b/src/FunctionalTest/Framework/TestFramework/UnsupportedHostPlatformAttribute.cs
new file mode 100644
index 00000000..a3f8a19e
--- /dev/null
+++ b/src/FunctionalTest/Framework/TestFramework/UnsupportedHostPlatformAttribute.cs
@@ -0,0 +1,58 @@
+// Copyright (c) Microsoft.
+// Licensed under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using SMO = Microsoft.SqlServer.Management.Smo;
+
+namespace Microsoft.SqlServer.Test.Manageability.Utils.TestFramework
+{
+ [Flags]
+ public enum SqlHostPlatforms
+ {
+ Windows,
+ Linux
+ }
+
+ ///
+ /// Attribute to mark a test method with HostPlatform values that it doesn't
+ /// support.
+ ///
+ /// Note this means that by default a test will run against all servers
+ /// regardless of their HostPlatform
+ [AttributeUsage(AttributeTargets.Method | AttributeTargets.Class, AllowMultiple = false)]
+ public class UnsupportedHostPlatformAttribute : SqlUnsupportedDimensionAttribute
+ {
+ private readonly ISet<string> _unsupportedHostPlatforms;
+
+ public UnsupportedHostPlatformAttribute(params SqlHostPlatforms[] unsupportedHostPlatforms)
+ {
+ _unsupportedHostPlatforms = new HashSet<string>(unsupportedHostPlatforms.Select(p => p.ToString()));
+ }
+
+ ///
+ /// Returns true if the server is supported, which in this case means that server.HostPlatform is
+ /// NOT one of the unsupported values.
+ ///
+ ///
+ ///
+ ///
+ ///
+ public override bool IsSupported(SMO.Server server, TestServerDescriptor serverDescriptor, string targetServerFriendlyName)
+ {
+ return _unsupportedHostPlatforms.Contains(serverDescriptor.HostPlatform) == false;
+ }
+
+ ///
+ /// Returns true if the server is supported, which in this case means that server.HostPlatform is
+ /// NOT one of the unsupported values.
+ ///
+ ///
+ ///
+ public override bool IsSupported(SMO.Server server)
+ {
+ return _unsupportedHostPlatforms.Contains(server.HostPlatform) == false;
+ }
+ }
+}
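A hypothetical combination of the dimension attributes from this change on a single test method, restricting it to on-premises Windows servers at SQL Server 2016 or later (the test name is invented):

    [TestMethod]
    [SupportedServerVersionRange(MinMajor = 13)]                          // SQL Server 2016+
    [UnsupportedDatabaseEngineType(DatabaseEngineType.SqlAzureDatabase)]  // skip Azure SQL Database targets
    [UnsupportedHostPlatform(SqlHostPlatforms.Linux)]                     // skip Linux-hosted servers
    public void Windows_only_on_prem_scenario()
    {
        // ...
    }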
diff --git a/src/FunctionalTest/Framework/azuresql.runsettings b/src/FunctionalTest/Framework/azuresql.runsettings
new file mode 100644
index 00000000..8693c764
--- /dev/null
+++ b/src/FunctionalTest/Framework/azuresql.runsettings
@@ -0,0 +1,93 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ .*Microsoft\.SqlServer\.Test\..*
+
+ .*Microsoft\.SqlServer\.Management\.SqlParser\.dll
+ .*NUnit.*
+ .*moq.*
+
+
+
+
+
+
+
+
+ ^System\.Diagnostics\.DebuggerHiddenAttribute$
+ ^System\.Diagnostics\.DebuggerNonUserCodeAttribute$
+ ^System\.Runtime\.CompilerServices.CompilerGeneratedAttribute$
+ ^System\.CodeDom\.Compiler.GeneratedCodeAttribute$
+ ^System\.Diagnostics\.CodeAnalysis.ExcludeFromCodeCoverageAttribute$
+
+
+
+
+
+
+ .*\\atlmfc\\.*
+ .*\\vctools\\.*
+ .*\\public\\sdk\\.*
+ .*\\microsoft sdks\\.*
+ .*\\vc\\include\\.*
+
+
+
+
+
+
+
+ ^B03F5F7F11D50A3A$
+ ^71E9BCE111E9429C$
+ ^8F50407C4E9E73B6$
+ ^E361AF139669C375$
+
+ ^AE41E2615877EB90$
+
+
+
+
+ True
+ True
+ True
+ False
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/src/FunctionalTest/Framework/azuresqldwgen3.runsettings b/src/FunctionalTest/Framework/azuresqldwgen3.runsettings
new file mode 100644
index 00000000..1de2e16b
--- /dev/null
+++ b/src/FunctionalTest/Framework/azuresqldwgen3.runsettings
@@ -0,0 +1,128 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ .*Microsoft\.SqlServer\.Test\..*
+
+ .*Microsoft\.SqlServer\.Management\.SqlParser\.dll
+ .*Microsoft\.SqlServer\.RegSvrEnum\.dll
+
+ .*Microsoft\.SqlServer\.Management\.Utility\.dll
+ .*NUnit.*
+ .*moq.*
+
+
+
+
+
+
+
+
+ ^System\.Diagnostics\.DebuggerHiddenAttribute$
+ ^System\.Diagnostics\.DebuggerNonUserCodeAttribute$
+ ^System\.Runtime\.CompilerServices.CompilerGeneratedAttribute$
+ ^System\.CodeDom\.Compiler.GeneratedCodeAttribute$
+ ^System\.Diagnostics\.CodeAnalysis.ExcludeFromCodeCoverageAttribute$
+
+
+
+
+
+
+ .*\\atlmfc\\.*
+ .*\\vctools\\.*
+ .*\\public\\sdk\\.*
+ .*\\microsoft sdks\\.*
+ .*\\vc\\include\\.*
+
+
+
+
+
+
+ .*DesignMode.*
+ .*DoDatabaseSpecialCase\(.*
+ .*GenerateStretchHeapWithClause.*
+ .*GetPropertyDefaultValue\(.*
+ .*InitFromSqlServer2005Store.*
+ .*InvalidSqlServer2005StoreFormatException.*
+ Microsoft\.SqlServer\.Management\.RegisteredServers.\RegisteredServersStore\.AddGroupMember.*
+ Microsoft\.SqlServer\.Management\.Sdk\.Sfc\.SfcSerializer.*
+ Microsoft\.SqlServer\.Management\.Smo\.AvailableSQLServers.*
+ Microsoft\.SqlServer\.Management\.Smo\.DatabaseDdlTriggerEventSet.*
+ Microsoft\.SqlServer\.Management\.Smo\.PostProcessServerDdlTriggerEvents.*
+ Microsoft\.SqlServer\.Management\.Smo\.ServerDdlTriggerEvent.*
+ Microsoft\.SqlServer\.Management\.Smo\.SqlSmoObject\.GetChildSingleton.*
+ Microsoft\.SqlServer\.Management\.Smo\.SqlSmoObject\.SetAccessToken.*.*PopulateV7.*
+ .*PopulateV7.*
+ .*PopulateV9.*
+ .*ScriptAlterLess9.*
+ .*ScriptDbProps70Comp.*
+ .*ScriptCreateLess9.*
+ .*TestNetSend
+ .*ThrowIfBelowVersion80SP3.*
+ .*ThrowIfAboveVersion80.*
+ .*ThrowIfAboveVersion100.*
+ .*ThrowIfSourceOrDestBelowVersion80.*
+ .*ThrowIfSourceOrDestBelowVersion90.*
+ .*ThrowIfSourceOrDestBelowVersion100.*
+ .*UpgradeFromSqlServer2005.*
+
+
+
+
+
+
+ ^B03F5F7F11D50A3A$
+ ^71E9BCE111E9429C$
+ ^8F50407C4E9E73B6$
+ ^E361AF139669C375$
+
+ ^AE41E2615877EB90$
+
+
+
+
+ True
+ True
+ True
+ False
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/src/FunctionalTest/Framework/azuresqledge.runsettings b/src/FunctionalTest/Framework/azuresqledge.runsettings
new file mode 100644
index 00000000..c2b1e294
--- /dev/null
+++ b/src/FunctionalTest/Framework/azuresqledge.runsettings
@@ -0,0 +1,128 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ .*Microsoft\.SqlServer\.Test\..*
+
+ .*Microsoft\.SqlServer\.Management\.SqlParser\.dll
+ .*Microsoft\.SqlServer\.RegSvrEnum\.dll
+
+ .*Microsoft\.SqlServer\.Management\.Utility\.dll
+ .*NUnit.*
+ .*moq.*
+
+
+
+
+
+
+
+
+ ^System\.Diagnostics\.DebuggerHiddenAttribute$
+ ^System\.Diagnostics\.DebuggerNonUserCodeAttribute$
+ ^System\.Runtime\.CompilerServices.CompilerGeneratedAttribute$
+ ^System\.CodeDom\.Compiler.GeneratedCodeAttribute$
+ ^System\.Diagnostics\.CodeAnalysis.ExcludeFromCodeCoverageAttribute$
+
+
+
+
+
+
+ .*\\atlmfc\\.*
+ .*\\vctools\\.*
+ .*\\public\\sdk\\.*
+ .*\\microsoft sdks\\.*
+ .*\\vc\\include\\.*
+
+
+
+
+
+
+ .*DesignMode.*
+ .*DoDatabaseSpecialCase\(.*
+ .*GenerateStretchHeapWithClause.*
+ .*GetPropertyDefaultValue\(.*
+ .*InitFromSqlServer2005Store.*
+ .*InvalidSqlServer2005StoreFormatException.*
+ Microsoft\.SqlServer\.Management\.RegisteredServers.\RegisteredServersStore\.AddGroupMember.*
+ Microsoft\.SqlServer\.Management\.Sdk\.Sfc\.SfcSerializer.*
+ Microsoft\.SqlServer\.Management\.Smo\.AvailableSQLServers.*
+ Microsoft\.SqlServer\.Management\.Smo\.DatabaseDdlTriggerEventSet.*
+ Microsoft\.SqlServer\.Management\.Smo\.PostProcessServerDdlTriggerEvents.*
+ Microsoft\.SqlServer\.Management\.Smo\.ServerDdlTriggerEvent.*
+ Microsoft\.SqlServer\.Management\.Smo\.SqlSmoObject\.GetChildSingleton.*
+ Microsoft\.SqlServer\.Management\.Smo\.SqlSmoObject\.SetAccessToken.*.*PopulateV7.*
+ .*PopulateV7.*
+ .*PopulateV9.*
+ .*ScriptAlterLess9.*
+ .*ScriptDbProps70Comp.*
+ .*ScriptCreateLess9.*
+ .*TestNetSend
+ .*ThrowIfBelowVersion80SP3.*
+ .*ThrowIfAboveVersion80.*
+ .*ThrowIfAboveVersion100.*
+ .*ThrowIfSourceOrDestBelowVersion80.*
+ .*ThrowIfSourceOrDestBelowVersion90.*
+ .*ThrowIfSourceOrDestBelowVersion100.*
+ .*UpgradeFromSqlServer2005.*
+
+
+
+
+
+
+ ^B03F5F7F11D50A3A$
+ ^71E9BCE111E9429C$
+ ^8F50407C4E9E73B6$
+ ^E361AF139669C375$
+
+ ^AE41E2615877EB90$
+
+
+
+
+ True
+ True
+ True
+ False
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/src/FunctionalTest/Framework/functionaltest.runsettings b/src/FunctionalTest/Framework/functionaltest.runsettings
new file mode 100644
index 00000000..63f78ff0
--- /dev/null
+++ b/src/FunctionalTest/Framework/functionaltest.runsettings
@@ -0,0 +1,134 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ .*Microsoft\.SqlServer\.Test\..*
+
+ .*Microsoft\.SqlServer\.Management\.SqlParser\.dll
+ .*Microsoft\.SqlServer\.RegSvrEnum\.dll
+
+ .*Microsoft\.SqlServer\.Management\.Utility\.dll
+ .*NUnit.*
+ .*moq.*
+
+
+
+
+
+
+
+
+ ^System\.Diagnostics\.DebuggerHiddenAttribute$
+ ^System\.Diagnostics\.DebuggerNonUserCodeAttribute$
+ ^System\.Runtime\.CompilerServices.CompilerGeneratedAttribute$
+ ^System\.CodeDom\.Compiler.GeneratedCodeAttribute$
+ ^System\.Diagnostics\.CodeAnalysis.ExcludeFromCodeCoverageAttribute$
+
+
+
+
+
+
+ .*\\atlmfc\\.*
+ .*\\vctools\\.*
+ .*\\public\\sdk\\.*
+ .*\\microsoft sdks\\.*
+ .*\\vc\\include\\.*
+
+
+
+
+
+
+ .*DesignMode.*
+ .*DoDatabaseSpecialCase\(.*
+ .*GenerateStretchHeapWithClause.*
+ .*GetPropertyDefaultValue\(.*
+ .*InitFromSqlServer2005Store.*
+ .*InvalidSqlServer2005StoreFormatException.*
+ Microsoft\.SqlServer\.Management\.RegisteredServers\.RegisteredServersStore\.AddGroupMember.*
+ Microsoft\.SqlServer\.Management\.Sdk\.Sfc\.SfcSerializer.*
+ Microsoft\.SqlServer\.Management\.Smo\.AvailableSQLServers.*
+ Microsoft\.SqlServer\.Management\.Smo\.DatabaseDdlTriggerEventSet.*
+ Microsoft\.SqlServer\.Management\.Smo\.DatabaseEvent\..*
+ Microsoft\.SqlServer\.Management\.Smo\.DatabaseEventSet.*
+ Microsoft\.SqlServer\.Management\.Smo\.PostProcessServerDdlTriggerEvents.*
+ Microsoft\.SqlServer\.Management\.Smo\.PermissionWorker\.AddPermission.*
+ Microsoft\.SqlServer\.Management\.Smo\.ServerDdlTriggerEvent.*
+ Microsoft\.SqlServer\.Management\.Smo\.ServerEvent\..*
+ Microsoft\.SqlServer\.Management\.Smo\.ServerTraceEventSet.*
+ Microsoft\.SqlServer\.Management\.Smo\.SqlSmoObject\.GetChildSingleton.*
+ Microsoft\.SqlServer\.Management\.Smo\.SqlSmoObject\.SetAccessToken.*
+ .*PopulateV7.*
+ .*PopulateV9.*
+ .*RemoteData.*
+ .*ScriptAlterLess9.*
+ .*ScriptDbProps70Comp.*
+ .*ScriptCreateLess9.*
+ .*TestNetSend
+ .*ThrowIfBelowVersion80SP3.*
+ .*ThrowIfAboveVersion80.*
+ .*ThrowIfAboveVersion100.*
+ .*ThrowIfSourceOrDestBelowVersion80.*
+ .*ThrowIfSourceOrDestBelowVersion90.*
+ .*ThrowIfSourceOrDestBelowVersion100.*
+ .*UpgradeFromSqlServer2005.*
+
+
+
+
+
+
+ ^B03F5F7F11D50A3A$
+ ^71E9BCE111E9429C$
+ ^8F50407C4E9E73B6$
+ ^E361AF139669C375$
+
+ ^AE41E2615877EB90$
+
+
+
+
+ True
+ True
+ True
+ False
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/src/FunctionalTest/Framework/linux.runsettings b/src/FunctionalTest/Framework/linux.runsettings
new file mode 100644
index 00000000..54b1a6be
--- /dev/null
+++ b/src/FunctionalTest/Framework/linux.runsettings
@@ -0,0 +1,96 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ .*Microsoft\.SqlServer\.Test\..*
+
+ .*Microsoft\.SqlServer\.Management\.SqlParser\.dll
+ .*Microsoft\.SqlServer\.RegSvrEnum\.dll
+
+ .*Microsoft\.SqlServer\.Management\.Utility\.dll
+ .*NUnit.*
+ .*moq.*
+
+
+
+
+
+
+
+
+ ^System\.Diagnostics\.DebuggerHiddenAttribute$
+ ^System\.Diagnostics\.DebuggerNonUserCodeAttribute$
+ ^System\.Runtime\.CompilerServices.CompilerGeneratedAttribute$
+ ^System\.CodeDom\.Compiler.GeneratedCodeAttribute$
+ ^System\.Diagnostics\.CodeAnalysis.ExcludeFromCodeCoverageAttribute$
+
+
+
+
+
+
+ .*\\atlmfc\\.*
+ .*\\vctools\\.*
+ .*\\public\\sdk\\.*
+ .*\\microsoft sdks\\.*
+ .*\\vc\\include\\.*
+
+
+
+
+
+
+
+ ^B03F5F7F11D50A3A$
+ ^71E9BCE111E9429C$
+ ^8F50407C4E9E73B6$
+ ^E361AF139669C375$
+
+ ^AE41E2615877EB90$
+
+
+
+
+ True
+ True
+ True
+ False
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/src/FunctionalTest/Framework/ondemand.runsettings b/src/FunctionalTest/Framework/ondemand.runsettings
new file mode 100644
index 00000000..4d45aa89
--- /dev/null
+++ b/src/FunctionalTest/Framework/ondemand.runsettings
@@ -0,0 +1,93 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ .*Microsoft\.SqlServer\.Test\..*
+
+ .*Microsoft\.SqlServer\.Management\.SqlParser\.dll
+ .*NUnit.*
+ .*moq.*
+
+
+
+
+
+
+
+
+ ^System\.Diagnostics\.DebuggerHiddenAttribute$
+ ^System\.Diagnostics\.DebuggerNonUserCodeAttribute$
+ ^System\.Runtime\.CompilerServices.CompilerGeneratedAttribute$
+ ^System\.CodeDom\.Compiler.GeneratedCodeAttribute$
+ ^System\.Diagnostics\.CodeAnalysis.ExcludeFromCodeCoverageAttribute$
+
+
+
+
+
+
+ .*\\atlmfc\\.*
+ .*\\vctools\\.*
+ .*\\public\\sdk\\.*
+ .*\\microsoft sdks\\.*
+ .*\\vc\\include\\.*
+
+
+
+
+
+
+
+ ^B03F5F7F11D50A3A$
+ ^71E9BCE111E9429C$
+ ^8F50407C4E9E73B6$
+ ^E361AF139669C375$
+
+ ^AE41E2615877EB90$
+
+
+
+
+ True
+ True
+ True
+ False
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/src/FunctionalTest/Framework/sql2008.runsettings b/src/FunctionalTest/Framework/sql2008.runsettings
new file mode 100644
index 00000000..fe4749ee
--- /dev/null
+++ b/src/FunctionalTest/Framework/sql2008.runsettings
@@ -0,0 +1,91 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ .*Microsoft\.SqlServer\.Test\..*
+ .*NUnit.*
+ .*moq.*
+
+
+
+
+
+
+
+
+ ^System\.Diagnostics\.DebuggerHiddenAttribute$
+ ^System\.Diagnostics\.DebuggerNonUserCodeAttribute$
+ ^System\.Runtime\.CompilerServices.CompilerGeneratedAttribute$
+ ^System\.CodeDom\.Compiler.GeneratedCodeAttribute$
+ ^System\.Diagnostics\.CodeAnalysis.ExcludeFromCodeCoverageAttribute$
+
+
+
+
+
+
+ .*\\atlmfc\\.*
+ .*\\vctools\\.*
+ .*\\public\\sdk\\.*
+ .*\\microsoft sdks\\.*
+ .*\\vc\\include\\.*
+
+
+
+
+
+
+
+ ^B03F5F7F11D50A3A$
+ ^71E9BCE111E9429C$
+ ^8F50407C4E9E73B6$
+ ^E361AF139669C375$
+
+ ^AE41E2615877EB90$
+
+
+
+
+ True
+ True
+ True
+ False
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/src/FunctionalTest/Framework/sql2012.runsettings b/src/FunctionalTest/Framework/sql2012.runsettings
new file mode 100644
index 00000000..dbe59bdb
--- /dev/null
+++ b/src/FunctionalTest/Framework/sql2012.runsettings
@@ -0,0 +1,91 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ .*Microsoft\.SqlServer\.Test\..*
+ .*NUnit.*
+ .*moq.*
+
+
+
+
+
+
+
+
+ ^System\.Diagnostics\.DebuggerHiddenAttribute$
+ ^System\.Diagnostics\.DebuggerNonUserCodeAttribute$
+ ^System\.Runtime\.CompilerServices.CompilerGeneratedAttribute$
+ ^System\.CodeDom\.Compiler.GeneratedCodeAttribute$
+ ^System\.Diagnostics\.CodeAnalysis.ExcludeFromCodeCoverageAttribute$
+
+
+
+
+
+
+ .*\\atlmfc\\.*
+ .*\\vctools\\.*
+ .*\\public\\sdk\\.*
+ .*\\microsoft sdks\\.*
+ .*\\vc\\include\\.*
+
+
+
+
+
+
+
+ ^B03F5F7F11D50A3A$
+ ^71E9BCE111E9429C$
+ ^8F50407C4E9E73B6$
+ ^E361AF139669C375$
+
+ ^AE41E2615877EB90$
+
+
+
+
+ True
+ True
+ True
+ False
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/src/FunctionalTest/Framework/sql2014.runsettings b/src/FunctionalTest/Framework/sql2014.runsettings
new file mode 100644
index 00000000..b89e73a7
--- /dev/null
+++ b/src/FunctionalTest/Framework/sql2014.runsettings
@@ -0,0 +1,91 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ .*Microsoft\.SqlServer\.Test\..*
+ .*NUnit.*
+ .*moq.*
+
+
+
+
+
+
+
+
+ ^System\.Diagnostics\.DebuggerHiddenAttribute$
+ ^System\.Diagnostics\.DebuggerNonUserCodeAttribute$
+ ^System\.Runtime\.CompilerServices.CompilerGeneratedAttribute$
+ ^System\.CodeDom\.Compiler.GeneratedCodeAttribute$
+ ^System\.Diagnostics\.CodeAnalysis.ExcludeFromCodeCoverageAttribute$
+
+
+
+
+
+
+ .*\\atlmfc\\.*
+ .*\\vctools\\.*
+ .*\\public\\sdk\\.*
+ .*\\microsoft sdks\\.*
+ .*\\vc\\include\\.*
+
+
+
+
+
+
+
+ ^B03F5F7F11D50A3A$
+ ^71E9BCE111E9429C$
+ ^8F50407C4E9E73B6$
+ ^E361AF139669C375$
+
+ ^AE41E2615877EB90$
+
+
+
+
+ True
+ True
+ True
+ False
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/src/FunctionalTest/Framework/sql2016.runsettings b/src/FunctionalTest/Framework/sql2016.runsettings
new file mode 100644
index 00000000..b3a782c5
--- /dev/null
+++ b/src/FunctionalTest/Framework/sql2016.runsettings
@@ -0,0 +1,91 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ .*Microsoft\.SqlServer\.Test\..*
+ .*NUnit.*
+ .*moq.*
+
+
+
+
+
+
+
+
+ ^System\.Diagnostics\.DebuggerHiddenAttribute$
+ ^System\.Diagnostics\.DebuggerNonUserCodeAttribute$
+ ^System\.Runtime\.CompilerServices.CompilerGeneratedAttribute$
+ ^System\.CodeDom\.Compiler.GeneratedCodeAttribute$
+ ^System\.Diagnostics\.CodeAnalysis.ExcludeFromCodeCoverageAttribute$
+
+
+
+
+
+
+ .*\\atlmfc\\.*
+ .*\\vctools\\.*
+ .*\\public\\sdk\\.*
+ .*\\microsoft sdks\\.*
+ .*\\vc\\include\\.*
+
+
+
+
+
+
+
+ ^B03F5F7F11D50A3A$
+ ^71E9BCE111E9429C$
+ ^8F50407C4E9E73B6$
+ ^E361AF139669C375$
+
+ ^AE41E2615877EB90$
+
+
+
+
+ True
+ True
+ True
+ False
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/src/FunctionalTest/Framework/sql2017.runsettings b/src/FunctionalTest/Framework/sql2017.runsettings
new file mode 100644
index 00000000..109bb7f2
--- /dev/null
+++ b/src/FunctionalTest/Framework/sql2017.runsettings
@@ -0,0 +1,91 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ .*Microsoft\.SqlServer\.Test\..*
+ .*NUnit.*
+ .*moq.*
+
+
+
+
+
+
+
+
+ ^System\.Diagnostics\.DebuggerHiddenAttribute$
+ ^System\.Diagnostics\.DebuggerNonUserCodeAttribute$
+ ^System\.Runtime\.CompilerServices.CompilerGeneratedAttribute$
+ ^System\.CodeDom\.Compiler.GeneratedCodeAttribute$
+ ^System\.Diagnostics\.CodeAnalysis.ExcludeFromCodeCoverageAttribute$
+
+
+
+
+
+
+ .*\\atlmfc\\.*
+ .*\\vctools\\.*
+ .*\\public\\sdk\\.*
+ .*\\microsoft sdks\\.*
+ .*\\vc\\include\\.*
+
+
+
+
+
+
+
+ ^B03F5F7F11D50A3A$
+ ^71E9BCE111E9429C$
+ ^8F50407C4E9E73B6$
+ ^E361AF139669C375$
+
+ ^AE41E2615877EB90$
+
+
+
+
+ True
+ True
+ True
+ False
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/src/FunctionalTest/Framework/sqlexpress.runsettings b/src/FunctionalTest/Framework/sqlexpress.runsettings
new file mode 100644
index 00000000..90598af8
--- /dev/null
+++ b/src/FunctionalTest/Framework/sqlexpress.runsettings
@@ -0,0 +1,132 @@
+
+
+
+
+ TestCategory!=Legacy
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ .*Microsoft\.SqlServer\.Test\..*
+
+ .*Microsoft\.SqlServer\.Management\.SqlParser\.dll
+ .*Microsoft\.SqlServer\.RegSvrEnum\.dll
+
+ .*Microsoft\.SqlServer\.Management\.Utility\.dll
+ .*NUnit.*
+ .*moq.*
+
+
+
+
+
+
+
+
+ ^System\.Diagnostics\.DebuggerHiddenAttribute$
+ ^System\.Diagnostics\.DebuggerNonUserCodeAttribute$
+ ^System\.Runtime\.CompilerServices.CompilerGeneratedAttribute$
+ ^System\.CodeDom\.Compiler.GeneratedCodeAttribute$
+ ^System\.Diagnostics\.CodeAnalysis.ExcludeFromCodeCoverageAttribute$
+
+
+
+
+
+
+ .*\\atlmfc\\.*
+ .*\\vctools\\.*
+ .*\\public\\sdk\\.*
+ .*\\microsoft sdks\\.*
+ .*\\vc\\include\\.*
+
+
+
+
+
+
+ .*DesignMode.*
+ .*DoDatabaseSpecialCase\(.*
+ .*GenerateStretchHeapWithClause.*
+ .*GetPropertyDefaultValue\(.*
+ .*InitFromSqlServer2005Store.*
+ .*InvalidSqlServer2005StoreFormatException.*
+ Microsoft\.SqlServer\.Management\.RegisteredServers\.RegisteredServersStore\.AddGroupMember.*
+ Microsoft\.SqlServer\.Management\.Sdk\.Sfc\.SfcSerializer.*
+ Microsoft\.SqlServer\.Management\.Smo\.AvailableSQLServers.*
+ Microsoft\.SqlServer\.Management\.Smo\.DatabaseDdlTriggerEventSet.*
+ Microsoft\.SqlServer\.Management\.Smo\.PostProcessServerDdlTriggerEvents.*
+ Microsoft\.SqlServer\.Management\.Smo\.ServerDdlTriggerEvent.*
+ Microsoft\.SqlServer\.Management\.Smo\.SqlSmoObject\.GetChildSingleton.*
+ Microsoft\.SqlServer\.Management\.Smo\.SqlSmoObject\.SetAccessToken.*
+ .*PopulateV7.*
+ .*PopulateV9.*
+ .*RemoteData.*
+ .*ScriptAlterLess9.*
+ .*ScriptDbProps70Comp.*
+ .*ScriptCreateLess9.*
+ .*TestNetSend
+ .*ThrowIfBelowVersion80SP3.*
+ .*ThrowIfAboveVersion80.*
+ .*ThrowIfAboveVersion100.*
+ .*ThrowIfSourceOrDestBelowVersion80.*
+ .*ThrowIfSourceOrDestBelowVersion90.*
+ .*ThrowIfSourceOrDestBelowVersion100.*
+ .*UpgradeFromSqlServer2005.*
+
+
+
+
+
+
+ ^B03F5F7F11D50A3A$
+ ^71E9BCE111E9429C$
+ ^8F50407C4E9E73B6$
+ ^E361AF139669C375$
+
+ ^AE41E2615877EB90$
+
+
+
+
+ True
+ True
+ True
+ False
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/src/FunctionalTest/Framework/sqlmi.runsettings b/src/FunctionalTest/Framework/sqlmi.runsettings
new file mode 100644
index 00000000..5896dfc4
--- /dev/null
+++ b/src/FunctionalTest/Framework/sqlmi.runsettings
@@ -0,0 +1,131 @@
+
+
+
+
+ TestCategory!=Legacy&FullyQualifiedName!~XEventDbScoped
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ .*Microsoft\.SqlServer\.Test\..*
+
+ .*Microsoft\.SqlServer\.Management\.SqlParser\.dll
+ .*Microsoft\.SqlServer\.RegSvrEnum\.dll
+
+ .*Microsoft\.SqlServer\.Management\.Utility\.dll
+ .*NUnit.*
+ .*moq.*
+
+
+
+
+
+
+
+
+ ^System\.Diagnostics\.DebuggerHiddenAttribute$
+ ^System\.Diagnostics\.DebuggerNonUserCodeAttribute$
+ ^System\.Runtime\.CompilerServices.CompilerGeneratedAttribute$
+ ^System\.CodeDom\.Compiler.GeneratedCodeAttribute$
+ ^System\.Diagnostics\.CodeAnalysis.ExcludeFromCodeCoverageAttribute$
+
+
+
+
+
+
+ .*\\atlmfc\\.*
+ .*\\vctools\\.*
+ .*\\public\\sdk\\.*
+ .*\\microsoft sdks\\.*
+ .*\\vc\\include\\.*
+
+
+
+
+
+
+ .*DesignMode.*
+ .*DoDatabaseSpecialCase\(.*
+ .*GenerateStretchHeapWithClause.*
+ .*GetPropertyDefaultValue\(.*
+ .*InitFromSqlServer2005Store.*
+ .*InvalidSqlServer2005StoreFormatException.*
+ Microsoft\.SqlServer\.Management\.RegisteredServers\.RegisteredServersStore\.AddGroupMember.*
+ Microsoft\.SqlServer\.Management\.Sdk\.Sfc\.SfcSerializer.*
+ Microsoft\.SqlServer\.Management\.Smo\.AvailableSQLServers.*
+ Microsoft\.SqlServer\.Management\.Smo\.DatabaseDdlTriggerEventSet.*
+ Microsoft\.SqlServer\.Management\.Smo\.PostProcessServerDdlTriggerEvents.*
+ Microsoft\.SqlServer\.Management\.Smo\.ServerDdlTriggerEvent.*
+ Microsoft\.SqlServer\.Management\.Smo\.SqlSmoObject\.GetChildSingleton.*
+ Microsoft\.SqlServer\.Management\.Smo\.SqlSmoObject\.SetAccessToken.*
+ .*PopulateV7.*
+ .*PopulateV9.*
+ .*ScriptAlterLess9.*
+ .*ScriptDbProps70Comp.*
+ .*ScriptCreateLess9.*
+ .*TestNetSend
+ .*ThrowIfBelowVersion80SP3.*
+ .*ThrowIfAboveVersion80.*
+ .*ThrowIfAboveVersion100.*
+ .*ThrowIfSourceOrDestBelowVersion80.*
+ .*ThrowIfSourceOrDestBelowVersion90.*
+ .*ThrowIfSourceOrDestBelowVersion100.*
+ .*UpgradeFromSqlServer2005.*
+
+
+
+
+
+
+ ^B03F5F7F11D50A3A$
+ ^71E9BCE111E9429C$
+ ^8F50407C4E9E73B6$
+ ^E361AF139669C375$
+
+ ^AE41E2615877EB90$
+
+
+
+
+ True
+ True
+ True
+ False
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/src/FunctionalTest/Framework/sqlv150.runsettings b/src/FunctionalTest/Framework/sqlv150.runsettings
new file mode 100644
index 00000000..50c6ebd8
--- /dev/null
+++ b/src/FunctionalTest/Framework/sqlv150.runsettings
@@ -0,0 +1,7 @@
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/src/FunctionalTest/Framework/sqlv160.runsettings b/src/FunctionalTest/Framework/sqlv160.runsettings
new file mode 100644
index 00000000..b370ed1f
--- /dev/null
+++ b/src/FunctionalTest/Framework/sqlv160.runsettings
@@ -0,0 +1,7 @@
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/src/FunctionalTest/Smo/BackupRestore/BackupRestoreTests.cs b/src/FunctionalTest/Smo/BackupRestore/BackupRestoreTests.cs
new file mode 100644
index 00000000..0ca493b6
--- /dev/null
+++ b/src/FunctionalTest/Smo/BackupRestore/BackupRestoreTests.cs
@@ -0,0 +1,147 @@
+// Copyright (c) Microsoft.
+// Licensed under the MIT license.
+
+using System;
+using System.IO;
+using System.Linq;
+using System.Reflection;
+using Microsoft.SqlServer.Management.Smo;
+using Microsoft.SqlServer.Test.Manageability.Utils;
+using Microsoft.SqlServer.Test.Manageability.Utils.TestFramework;
+using Microsoft.VisualStudio.TestTools.UnitTesting;
+using NUnit.Framework;
+using Assert = NUnit.Framework.Assert;
+
+namespace Microsoft.SqlServer.Test.SMO.BackupRestore
+{
+ [TestClass]
+ public class BackupRestoreTests : SqlTestBase
+ {
+ [TestMethod]
+ [SupportedServerVersionRange(Edition = Management.Common.DatabaseEngineEdition.Enterprise)]
+ public void PageRestorePlanner_restores_database_page()
+ {
+ ExecuteWithDbDrop(db =>
+ {
+ if (db.RecoveryModel != RecoveryModel.Full)
+ {
+ db.RecoveryModel = RecoveryModel.Full;
+ db.Alter();
+ }
+ var table = db.CreateTable("tbl");
+ table.InsertDataToTable(1000);
+ db.TakeFullBackup();
+ // we have to fake some suspect pages
+ var fileId = db.FileGroups["PRIMARY"].Files[0].ID;
+ ServerContext.ConnectionContext.ExecuteNonQuery($"insert into msdb.dbo.suspect_pages(database_id, file_id, page_id, event_type, error_count) values({db.ID}, {fileId}, 10, 1, 1 )");
+ try
+ {
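+ // PageRestorePlanner reads msdb.dbo.suspect_pages for this database and builds a page-level
+ // RESTORE sequence from the existing backups, including a tail-log backup/restore so the
+ // repaired page is brought back online.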
+ var planner = new PageRestorePlanner(db)
+ {
+ TailLogBackupFile = Path.Combine(db.PrimaryFilePath, $"PageRestore{Guid.NewGuid()}.log")
+ };
+ Assert.That(planner.SuspectPages.Select(p => (p.FileID, p.PageID)), Is.EquivalentTo(new[] { (fileId, 10) }), "planner.SuspectPages");
+ var plan = planner.CreateRestorePlan();
+ plan.Execute();
+ var suspectPages = PageRestorePlanner.GetSuspectPages(db);
+ Assert.That(suspectPages, Is.Empty, "GetSuspectPages after plan.Execute");
+ }
+ finally
+ {
+ this.ServerContext.ConnectionContext.ExecuteNonQuery($"delete from msdb.dbo.suspect_pages where database_id = {db.ID}");
+ }
+ });
+ }
+
+ [TestMethod]
+ [SupportedServerVersionRange(Edition = Management.Common.DatabaseEngineEdition.Enterprise)]
+ public void DatabaseRestorePlanner_restores_database()
+ {
+ ExecuteWithDbDrop(db =>
+ {
+ var table = db.CreateTable("tbl");
+ table.InsertDataToTable(1000);
+ db.TakeFullBackup();
+ var table2 = db.CreateTable("gone");
+ table.InsertDataToTable(1000);
+ table.Drop();
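+ // Back up the tail of the log (taken WITH NORECOVERY) and restore to the last full backup:
+ // the dropped table should come back and the table created after the backup should disappear.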
+ var planner = new DatabaseRestorePlanner(db.Parent, db.Name)
+ {
+ TailLogBackupFile = Path.Combine(db.PrimaryFilePath, $"DbRestore{Guid.NewGuid()}.log"),
+ BackupTailLog = true,
+ TailLogWithNoRecovery = true
+ };
+ var plan = planner.CreateRestorePlan();
+ plan.Execute();
+ db.Tables.Refresh();
+ Assert.That(db.Tables.Cast<Table>().Select(t => t.Name), Has.Member(table.Name), $"table {table.Name} should be recovered");
+ Assert.That(db.Tables.Cast<Table>().Select(t => t.Name), Has.No.Member(table2.Name), $"table {table2.Name} should be gone");
+ });
+ }
+
+ [TestMethod]
+ [SupportedServerVersionRange(Edition = Management.Common.DatabaseEngineEdition.Enterprise, MinMajor = 15)]
+ // Verifies that during a point-in-time restore, a newer backup with a last LSN in the middle of the intended restore range won't break the restore plan
+ public void DatabaseRestorePlanner_restore_database_plan_NewerOutOfSequencePointInTime()
+ {
+ DateTime pointInTime = new DateTime(2021, 03, 26, 21, 30, 0);
+ DatabaseRestorePlanner_GeneratePlan("NewerOutOfSequenceSetup.sql", "NewerOutOfSequencePointInTimePlan.sql", "NewerOutOfSequenceCleanup.sql", pointInTime);
+ }
+
+ [TestMethod]
+ [SupportedServerVersionRange(Edition = Management.Common.DatabaseEngineEdition.Enterprise, MinMajor = 15)]
+ // Verifies that during a point-in-time restore, an older backup with a last LSN in the middle of the intended restore range won't break the restore plan
+ public void DatabaseRestorePlanner_restore_database_plan_OlderOutofSequencePointInTime()
+ {
+ DateTime pointInTime = new DateTime(2021, 04, 19, 00, 15, 0);
+ DatabaseRestorePlanner_GeneratePlan("OlderOutOfSequenceSetup.sql", "OlderOutOfSequencePointInTimePlan.sql", "OlderOutOfSequenceCleanup.sql", pointInTime);
+ }
+
+ [TestMethod]
+ [SupportedServerVersionRange(Edition = Management.Common.DatabaseEngineEdition.Enterprise, MinMajor = 15)]
+ // Verifies that a full backup that started before a log backup is still correctly selected for the backup chain
+ public void DatabaseRestorePlanner_restore_database_plan_LongFullBackup()
+ {
+ DatabaseRestorePlanner_GeneratePlan("LongFullBackupSetup.sql", "LongFullBackupPlan.sql", "LongFullBackupCleanup.sql");
+ }
+
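+ /// <summary>
+ /// Seeds msdb backup history from an embedded setup script, generates a restore plan for the
+ /// fake [Data] database (optionally to a point in time), compares the scripted plan with the
+ /// expected baseline script, and finally runs the cleanup script.
+ /// </summary>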
+ private void DatabaseRestorePlanner_GeneratePlan(string setupFilename, string expectedPlanFilename, string cleanupFilename, DateTime? pointInTime = null)
+ {
+ ExecuteTest(() =>
+ {
+ Assembly asm = Assembly.GetExecutingAssembly();
+ string cleanupScript;
+ using (StreamReader reader = new StreamReader(asm.GetManifestResourceStream(cleanupFilename)))
+ {
+ cleanupScript = reader.ReadToEnd();
+ }
+ try
+ {
+ // Run the cleanup script before setup to make sure nothing will interfere. In a clean environment this affects 0 rows.
+ ServerContext.ConnectionContext.ExecuteNonQuery(cleanupScript);
+ string setupScript;
+ using (StreamReader reader = new StreamReader(asm.GetManifestResourceStream(setupFilename)))
+ {
+ setupScript = reader.ReadToEnd();
+ }
+ ServerContext.ConnectionContext.ExecuteNonQuery(setupScript);
+ DatabaseRestorePlanner planner = pointInTime.HasValue ?
+ new DatabaseRestorePlanner(ServerContext, "Data", pointInTime.Value, tailLogBackupFile:null):
+ new DatabaseRestorePlanner(ServerContext, "Data");
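+ // When a point in time is supplied, the planner limits the restore chain to that time,
+ // which shows up as a STOPAT clause in the scripted plan.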
+ RestorePlan plan = planner.CreateRestorePlan();
+ string actualScript = plan.Script().ToSingleString();
+ string expectedScript;
+ using (StreamReader reader = new StreamReader(asm.GetManifestResourceStream(expectedPlanFilename)))
+ {
+ expectedScript = reader.ReadToEnd();
+ }
+ Assert.That(actualScript, Is.EqualTo(expectedScript), $"Restore plan should match expected plan");
+ }
+ finally
+ {
+ ServerContext.ConnectionContext.ExecuteNonQuery(cleanupScript);
+ }
+ });
+ }
+ }
+}
diff --git a/src/FunctionalTest/Smo/BackupRestore/LongFullBackupCleanup.sql b/src/FunctionalTest/Smo/BackupRestore/LongFullBackupCleanup.sql
new file mode 100644
index 00000000..f6d14e5f
--- /dev/null
+++ b/src/FunctionalTest/Smo/BackupRestore/LongFullBackupCleanup.sql
@@ -0,0 +1,16 @@
+USE [msdb]
+GO
+
+DECLARE @id1 INT, @id2 INT, @id3 INT, @id4 INT, @id5 INT, @id6 INT
+SET @id1 = (SELECT media_set_id FROM dbo.backupmediaset WHERE media_uuid='591E2543-77CD-4710-90EE-EFF499963810')
+SET @id2 = (SELECT media_set_id FROM dbo.backupmediaset WHERE media_uuid='CFE28E39-BECA-4B12-A9D4-71B4CD493B8C')
+SET @id3 = (SELECT media_set_id FROM dbo.backupmediaset WHERE media_uuid='422D0DAE-2FED-41C3-8E22-6B4884D70A85')
+SET @id4 = (SELECT media_set_id FROM dbo.backupmediaset WHERE media_uuid='84BC7B34-140A-437D-9348-B15B8B6036B4')
+SET @id5 = (SELECT media_set_id FROM dbo.backupmediaset WHERE media_uuid='13D05B2A-972D-49BD-8E8C-0E0ADB27FD18')
+
+DELETE FROM [dbo].[backupset] WHERE media_set_id IN (@id1, @id2, @id3, @id4, @id5)
+
+DELETE FROM [dbo].[backupmediafamily] WHERE media_set_id IN (@id1, @id2, @id3, @id4, @id5)
+
+DELETE FROM [dbo].[backupmediaset] WHERE media_set_id IN (@id1, @id2, @id3, @id4, @id5)
+GO
\ No newline at end of file
diff --git a/src/FunctionalTest/Smo/BackupRestore/LongFullBackupPlan.sql b/src/FunctionalTest/Smo/BackupRestore/LongFullBackupPlan.sql
new file mode 100644
index 00000000..17ea3768
--- /dev/null
+++ b/src/FunctionalTest/Smo/BackupRestore/LongFullBackupPlan.sql
@@ -0,0 +1,3 @@
+USE [master]
+RESTORE DATABASE [Data] FROM URL = N'https://test.windows.net/sample/FULL/Data_FULL_2021_04_18_211501.bak' WITH FILE = 1, NORECOVERY, NOUNLOAD
+RESTORE LOG [Data] FROM URL = N'https://test.windows.net/sample/FULL/Data_LOG_2021_04_19_043000.trn' WITH FILE = 1, NOUNLOAD
diff --git a/src/FunctionalTest/Smo/BackupRestore/LongFullBackupSetup.sql b/src/FunctionalTest/Smo/BackupRestore/LongFullBackupSetup.sql
new file mode 100644
index 00000000..bda86821
--- /dev/null
+++ b/src/FunctionalTest/Smo/BackupRestore/LongFullBackupSetup.sql
@@ -0,0 +1,114 @@
+USE [msdb]
+GO
+
+INSERT INTO [dbo].[backupmediaset]
+ ([media_uuid]
+ ,[media_family_count]
+ ,[name]
+ ,[description]
+ ,[software_name]
+ ,[software_vendor_id]
+ ,[MTF_major_version]
+ ,[mirror_count]
+ ,[is_password_protected]
+ ,[is_compressed]
+ ,[is_encrypted])
+ VALUES
+('591E2543-77CD-4710-90EE-EFF499963810', 1, NULL, NULL, 'Microsoft SQL Server', 4608, 1, 1, 0, 1, NULL),
+('CFE28E39-BECA-4B12-A9D4-71B4CD493B8C', 1, NULL, NULL, 'Microsoft SQL Server', 4608, 1, 1, 0, 1, NULL),
+('422D0DAE-2FED-41C3-8E22-6B4884D70A85', 1, NULL, NULL, 'Microsoft SQL Server', 4608, 1, 1, 0, 1, NULL),
+('84BC7B34-140A-437D-9348-B15B8B6036B4', 1, NULL, NULL, 'Microsoft SQL Server', 4608, 1, 1, 0, 1, NULL),
+('13D05B2A-972D-49BD-8E8C-0E0ADB27FD18', 1, NULL, NULL, 'Microsoft SQL Server', 4608, 1, 1, 0, 1, NULL)
+GO
+
+INSERT INTO [dbo].[backupmediafamily]
+ ([media_set_id]
+ ,[family_sequence_number]
+ ,[media_family_id]
+ ,[media_count]
+ ,[logical_device_name]
+ ,[physical_device_name]
+ ,[device_type]
+ ,[physical_block_size]
+ ,[mirror])
+ VALUES
+((SELECT media_set_id FROM dbo.backupmediaset WHERE media_uuid='591E2543-77CD-4710-90EE-EFF499963810'), 1, '39AC4DAD-0000-0000-0000-000000000000', 1, NULL, 'https://test.windows.net/sample/FULL/Data_DIFF_2021_04_18_201500.bak', 9, 65536, 0),
+((SELECT media_set_id FROM dbo.backupmediaset WHERE media_uuid='CFE28E39-BECA-4B12-A9D4-71B4CD493B8C'), 1, '3B333306-0000-0000-0000-000000000000', 1, NULL, 'https://test.windows.net/sample/FULL/Data_LOG_2021_04_18_203000.trn', 9, 65536, 0),
+((SELECT media_set_id FROM dbo.backupmediaset WHERE media_uuid='422D0DAE-2FED-41C3-8E22-6B4884D70A85'), 1, '6BF8211C-0000-0000-0000-000000000000', 1, NULL, 'https://test.windows.net/sample/FULL/Data_LOG_2021_04_19_003000.trn', 9, 65536, 0),
+((SELECT media_set_id FROM dbo.backupmediaset WHERE media_uuid='84BC7B34-140A-437D-9348-B15B8B6036B4'), 1, '0071B2F1-0000-0000-0000-000000000000', 1, NULL, 'https://test.windows.net/sample/FULL/Data_FULL_2021_04_18_211501.bak', 9, 65536, 0),
+((SELECT media_set_id FROM dbo.backupmediaset WHERE media_uuid='13D05B2A-972D-49BD-8E8C-0E0ADB27FD18'), 1, '16B99737-0000-0000-0000-000000000000', 1, NULL, 'https://test.windows.net/sample/FULL/Data_LOG_2021_04_19_043000.trn', 9, 65536, 0)
+
+GO
+
+INSERT INTO [dbo].[backupset]
+ ([backup_set_uuid]
+ ,[media_set_id]
+ ,[first_family_number]
+ ,[first_media_number]
+ ,[last_family_number]
+ ,[last_media_number]
+ ,[catalog_family_number]
+ ,[catalog_media_number]
+ ,[position]
+ ,[expiration_date]
+ ,[software_vendor_id]
+ ,[name]
+ ,[description]
+ ,[user_name]
+ ,[software_major_version]
+ ,[software_minor_version]
+ ,[software_build_version]
+ ,[time_zone]
+ ,[mtf_minor_version]
+ ,[first_lsn]
+ ,[last_lsn]
+ ,[checkpoint_lsn]
+ ,[database_backup_lsn]
+ ,[database_creation_date]
+ ,[backup_start_date]
+ ,[backup_finish_date]
+ ,[type]
+ ,[sort_order]
+ ,[code_page]
+ ,[compatibility_level]
+ ,[database_version]
+ ,[backup_size]
+ ,[database_name]
+ ,[server_name]
+ ,[machine_name]
+ ,[flags]
+ ,[unicode_locale]
+ ,[unicode_compare_style]
+ ,[collation_name]
+ ,[is_password_protected]
+ ,[recovery_model]
+ ,[has_bulk_logged_data]
+ ,[is_snapshot]
+ ,[is_readonly]
+ ,[is_single_user]
+ ,[has_backup_checksums]
+ ,[is_damaged]
+ ,[begins_log_chain]
+ ,[has_incomplete_metadata]
+ ,[is_force_offline]
+ ,[is_copy_only]
+ ,[first_recovery_fork_guid]
+ ,[last_recovery_fork_guid]
+ ,[fork_point_lsn]
+ ,[database_guid]
+ ,[family_guid]
+ ,[differential_base_lsn]
+ ,[differential_base_guid]
+ ,[compressed_backup_size]
+ ,[key_algorithm]
+ ,[encryptor_thumbprint]
+ ,[encryptor_type])
+ VALUES
+('2DF60F1A-55FF-48D8-A971-EFEFB7F7E434', (SELECT media_set_id FROM dbo.backupmediaset WHERE media_uuid='591E2543-77CD-4710-90EE-EFF499963810'), 1, 1, 1, 1, 1, 1, 1, NULL, 4608, NULL, NULL, 'GLOBAL\test$', 15, 0, 4102, 8, 0, 315000007756600000, 322000000237600000, 322000000236800000, 289000004658900000, '3/18/2021 15:06:04.000', '4/18/2021 20:15:00.000', '4/18/2021 20:15:02.000', 'I', 0, 0, 150, 904, 399729664, 'Data', 'Sample', 'Sample', 2576, 1033, 196609, 'Coll', 0, 'FULL', 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, '857B55F7-2E1A-43E8-84C1-848D68D52EF9', '857B55F7-2E1A-43E8-84C1-848D68D52EF9', NULL, '355F59DF-59A9-4CA6-8269-22299704E340', '68C4BBF2-3AC5-4535-BEE1-95CEA76FE4C3', 289000004658900000, 'A713999E-4089-4AC7-B353-160B09B72632', 40681515, NULL, NULL, NULL),
+('1B270B12-8013-42F5-A2EA-C603C5D5B866', (SELECT media_set_id FROM dbo.backupmediaset WHERE media_uuid='CFE28E39-BECA-4B12-A9D4-71B4CD493B8C'), 1, 1, 1, 1, 1, 1, 1, NULL, 4608, NULL, NULL, 'GLOBAL\test$', 15, 0, 4102, 8, 0, 320000000231800000, 322000000252300000, 322000000236800000, 289000004658900000, '3/18/2021 15:06:04.000', '4/18/2021 20:30:00.000', '4/18/2021 20:30:00.000', 'L', 0, 0, 150, 904, 67401728, 'Data', 'Sample', 'Sample', 528, 1033, 196609, 'Coll', 0, 'FULL', 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, '857B55F7-2E1A-43E8-84C1-848D68D52EF9', '857B55F7-2E1A-43E8-84C1-848D68D52EF9', NULL, '355F59DF-59A9-4CA6-8269-22299704E340', '68C4BBF2-3AC5-4535-BEE1-95CEA76FE4C3', NULL, NULL, 1118579, NULL, NULL, NULL),
+('338BEACA-31A6-4C11-892A-4AE1CA137490', (SELECT media_set_id FROM dbo.backupmediaset WHERE media_uuid='422D0DAE-2FED-41C3-8E22-6B4884D70A85'), 1, 1, 1, 1, 1, 1, 1, NULL, 4608, NULL, NULL, 'GLOBAL\test$', 15, 0, 4102, 8, 0, 322000000252300000, 324000000242400000, 324000000004300000, 289000004658900000, '3/18/2021 15:06:04.000', '4/19/2021 0:30:00.000', '4/19/2021 0:30:00.000', 'L', 0, 0, 150, 904, 66746368, 'Data', 'Sample', 'Sample', 528, 1033, 196609, 'Coll', 0, 'FULL', 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, '857B55F7-2E1A-43E8-84C1-848D68D52EF9', '857B55F7-2E1A-43E8-84C1-848D68D52EF9', NULL, '355F59DF-59A9-4CA6-8269-22299704E340', '68C4BBF2-3AC5-4535-BEE1-95CEA76FE4C3', NULL, NULL, 850827, NULL, NULL, NULL),
+('A713999E-4089-4AC7-B353-160B09B72632', (SELECT media_set_id FROM dbo.backupmediaset WHERE media_uuid='84BC7B34-140A-437D-9348-B15B8B6036B4'), 1, 1, 1, 1, 1, 1, 1, NULL, 4608, NULL, NULL, 'GLOBAL\test$', 15, 0, 4102, 8, 0, 321000000231800000, 325000000242400000, 289000004658900000, 250000005055600000, '3/18/2021 15:06:04.000', '4/18/2021 21:15:01.000', '4/19/2021 0:45:50.000', 'D', 0, 0, 150, 904, 2286685184, 'Data', 'Sample', 'Sample', 528, 1033, 196609, 'Coll', 0, 'FULL', 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, '857B55F7-2E1A-43E8-84C1-848D68D52EF9', '857B55F7-2E1A-43E8-84C1-848D68D52EF9', NULL, '355F59DF-59A9-4CA6-8269-22299704E340', '68C4BBF2-3AC5-4535-BEE1-95CEA76FE4C3', NULL, NULL, 591167805, NULL, NULL, NULL),
+('943F4841-022F-4640-9D27-CDD3BE5918FA', (SELECT media_set_id FROM dbo.backupmediaset WHERE media_uuid='13D05B2A-972D-49BD-8E8C-0E0ADB27FD18'), 1, 1, 1, 1, 1, 1, 1, NULL, 4608, NULL, NULL, 'GLOBAL\test$', 15, 0, 4102, 8, 0, 324000000242400000, 327000000391800000, 327000000158300000, 289000004658900000, '3/18/2021 15:06:04.000', '4/19/2021 4:30:00.000', '4/19/2021 4:30:01.000', 'L', 0, 0, 150, 904, 197817344, 'Data', 'Sample', 'Sample', 528, 1033, 196609, 'Coll', 0, 'FULL', 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, '857B55F7-2E1A-43E8-84C1-848D68D52EF9', '857B55F7-2E1A-43E8-84C1-848D68D52EF9', NULL, '355F59DF-59A9-4CA6-8269-22299704E340', '68C4BBF2-3AC5-4535-BEE1-95CEA76FE4C3', NULL, NULL, 13566920, NULL, NULL, NULL)
+
+
+GO
\ No newline at end of file
diff --git a/src/FunctionalTest/Smo/BackupRestore/NewerOutOfSequenceCleanup.sql b/src/FunctionalTest/Smo/BackupRestore/NewerOutOfSequenceCleanup.sql
new file mode 100644
index 00000000..b3b7f175
--- /dev/null
+++ b/src/FunctionalTest/Smo/BackupRestore/NewerOutOfSequenceCleanup.sql
@@ -0,0 +1,17 @@
+USE [msdb]
+GO
+
+DECLARE @id1 INT, @id2 INT, @id3 INT, @id4 INT, @id5 INT, @id6 INT
+SET @id1 = (SELECT media_set_id FROM dbo.backupmediaset WHERE media_uuid='4B540F0F-80A2-4DDB-AC3F-15A12CC0B582')
+SET @id2 = (SELECT media_set_id FROM dbo.backupmediaset WHERE media_uuid='F67AEB07-082B-4859-80A4-A3BD9A9EA2BE')
+SET @id3 = (SELECT media_set_id FROM dbo.backupmediaset WHERE media_uuid='09188E25-3F16-40DF-96A6-F13E8CBBA2CD')
+SET @id4 = (SELECT media_set_id FROM dbo.backupmediaset WHERE media_uuid='54BA97B4-3A45-4705-AEDD-774069F072B5')
+SET @id5 = (SELECT media_set_id FROM dbo.backupmediaset WHERE media_uuid='0C6113E2-8517-412F-ABF4-1132F1723CE3')
+SET @id6 = (SELECT media_set_id FROM dbo.backupmediaset WHERE media_uuid='D79AD6D6-2FE0-4903-AEA5-F0856D4F551C')
+
+DELETE FROM [dbo].[backupset] WHERE media_set_id IN (@id1, @id2, @id3, @id4, @id5, @id6)
+
+DELETE FROM [dbo].[backupmediafamily] WHERE media_set_id IN (@id1, @id2, @id3, @id4, @id5, @id6)
+
+DELETE FROM [dbo].[backupmediaset] WHERE media_set_id IN (@id1, @id2, @id3, @id4, @id5, @id6)
+GO
\ No newline at end of file
diff --git a/src/FunctionalTest/Smo/BackupRestore/NewerOutOfSequencePointInTimePlan.sql b/src/FunctionalTest/Smo/BackupRestore/NewerOutOfSequencePointInTimePlan.sql
new file mode 100644
index 00000000..5893623e
--- /dev/null
+++ b/src/FunctionalTest/Smo/BackupRestore/NewerOutOfSequencePointInTimePlan.sql
@@ -0,0 +1,5 @@
+USE [master]
+RESTORE DATABASE [Data] FROM URL = N'https://test.windows.net/sample/FULL/Data_FULL_2021_03_24_201551.bak' WITH FILE = 1, NORECOVERY, NOUNLOAD
+RESTORE DATABASE [Data] FROM URL = N'https://test.windows.net/sample/FULL/Data_DIFF_2021_03_26_201508.bak' WITH FILE = 1, NORECOVERY, NOUNLOAD
+RESTORE LOG [Data] FROM URL = N'https://test.windows.net/sample/FULL/Data_LOG_2021_03_26_203010.trn' WITH FILE = 1, NORECOVERY, NOUNLOAD
+RESTORE LOG [Data] FROM URL = N'https://test.windows.net/sample/FULL/Data_LOG_2021_03_27_003008.trn' WITH FILE = 1, NOUNLOAD, STOPAT = N'2021-03-26T21:30:00'
diff --git a/src/FunctionalTest/Smo/BackupRestore/NewerOutOfSequenceSetup.sql b/src/FunctionalTest/Smo/BackupRestore/NewerOutOfSequenceSetup.sql
new file mode 100644
index 00000000..3787df48
--- /dev/null
+++ b/src/FunctionalTest/Smo/BackupRestore/NewerOutOfSequenceSetup.sql
@@ -0,0 +1,116 @@
+USE [msdb]
+GO
+
+INSERT INTO [dbo].[backupmediaset]
+ ([media_uuid]
+ ,[media_family_count]
+ ,[name]
+ ,[description]
+ ,[software_name]
+ ,[software_vendor_id]
+ ,[MTF_major_version]
+ ,[mirror_count]
+ ,[is_password_protected]
+ ,[is_compressed]
+ ,[is_encrypted])
+ VALUES
+('4B540F0F-80A2-4DDB-AC3F-15A12CC0B582', 1, NULL, NULL, 'Microsoft SQL Server', 4608, 1, 1, 0, 1, NULL),
+('F67AEB07-082B-4859-80A4-A3BD9A9EA2BE', 1, NULL, NULL, 'Microsoft SQL Server', 4608, 1, 1, 0, 1, NULL),
+('09188E25-3F16-40DF-96A6-F13E8CBBA2CD', 1, NULL, NULL, 'Microsoft SQL Server', 4608, 1, 1, 0, 1, NULL),
+('54BA97B4-3A45-4705-AEDD-774069F072B5', 1, NULL, NULL, 'Microsoft SQL Server', 4608, 1, 1, 0, 1, NULL),
+('0C6113E2-8517-412F-ABF4-1132F1723CE3', 1, NULL, NULL, 'Microsoft SQL Server', 4608, 1, 1, 0, 1, NULL),
+('D79AD6D6-2FE0-4903-AEA5-F0856D4F551C', 1, NULL, NULL, 'Microsoft SQL Server', 4608, 1, 1, 0, 1, NULL)
+GO
+
+INSERT INTO [dbo].[backupmediafamily]
+ ([media_set_id]
+ ,[family_sequence_number]
+ ,[media_family_id]
+ ,[media_count]
+ ,[logical_device_name]
+ ,[physical_device_name]
+ ,[device_type]
+ ,[physical_block_size]
+ ,[mirror])
+ VALUES
+((SELECT media_set_id FROM dbo.backupmediaset WHERE media_uuid='4B540F0F-80A2-4DDB-AC3F-15A12CC0B582'), 1, 'DF2CF001-0000-0000-0000-000000000000', 1, NULL, 'https://test.windows.net/sample/FULL/Data_FULL_2021_03_24_201551.bak', 9, 65536, 0),
+((SELECT media_set_id FROM dbo.backupmediaset WHERE media_uuid='F67AEB07-082B-4859-80A4-A3BD9A9EA2BE'), 1, 'C27CDD31-0000-0000-0000-000000000000', 1, NULL, 'https://test.windows.net/sample/FULL/Data_DIFF_2021_03_26_201508.bak', 9, 65536, 0),
+((SELECT media_set_id FROM dbo.backupmediaset WHERE media_uuid='09188E25-3F16-40DF-96A6-F13E8CBBA2CD'), 1, 'B6E65A10-0000-0000-0000-000000000000', 1, NULL, 'https://test.windows.net/sample/FULL/Data_LOG_2021_03_26_203010.trn', 9, 65536, 0),
+((SELECT media_set_id FROM dbo.backupmediaset WHERE media_uuid='54BA97B4-3A45-4705-AEDD-774069F072B5'), 1, 'BE480E20-0000-0000-0000-000000000000', 1, NULL, 'https://test.windows.net/sample/FULL/Data_LOG_2021_03_27_003008.trn', 9, 65536, 0),
+((SELECT media_set_id FROM dbo.backupmediaset WHERE media_uuid='0C6113E2-8517-412F-ABF4-1132F1723CE3'), 1, '853E1D8A-0000-0000-0000-000000000000', 1, NULL, 'https://test.windows.net/sample/FULL/Data_LOG_2021_03_29_083001.trn', 9, 65536, 0),
+((SELECT media_set_id FROM dbo.backupmediaset WHERE media_uuid='D79AD6D6-2FE0-4903-AEA5-F0856D4F551C'), 1, '69D8A9BC-0000-0000-0000-000000000000', 1, NULL, 'https://test.windows.net/sample/FULL/Data_LOG_2021_03_29_083632.trn', 9, 65536, 0)
+
+GO
+
+INSERT INTO [dbo].[backupset]
+ ([backup_set_uuid]
+ ,[media_set_id]
+ ,[first_family_number]
+ ,[first_media_number]
+ ,[last_family_number]
+ ,[last_media_number]
+ ,[catalog_family_number]
+ ,[catalog_media_number]
+ ,[position]
+ ,[expiration_date]
+ ,[software_vendor_id]
+ ,[name]
+ ,[description]
+ ,[user_name]
+ ,[software_major_version]
+ ,[software_minor_version]
+ ,[software_build_version]
+ ,[time_zone]
+ ,[mtf_minor_version]
+ ,[first_lsn]
+ ,[last_lsn]
+ ,[checkpoint_lsn]
+ ,[database_backup_lsn]
+ ,[database_creation_date]
+ ,[backup_start_date]
+ ,[backup_finish_date]
+ ,[type]
+ ,[sort_order]
+ ,[code_page]
+ ,[compatibility_level]
+ ,[database_version]
+ ,[backup_size]
+ ,[database_name]
+ ,[server_name]
+ ,[machine_name]
+ ,[flags]
+ ,[unicode_locale]
+ ,[unicode_compare_style]
+ ,[collation_name]
+ ,[is_password_protected]
+ ,[recovery_model]
+ ,[has_bulk_logged_data]
+ ,[is_snapshot]
+ ,[is_readonly]
+ ,[is_single_user]
+ ,[has_backup_checksums]
+ ,[is_damaged]
+ ,[begins_log_chain]
+ ,[has_incomplete_metadata]
+ ,[is_force_offline]
+ ,[is_copy_only]
+ ,[first_recovery_fork_guid]
+ ,[last_recovery_fork_guid]
+ ,[fork_point_lsn]
+ ,[database_guid]
+ ,[family_guid]
+ ,[differential_base_lsn]
+ ,[differential_base_guid]
+ ,[compressed_backup_size]
+ ,[key_algorithm]
+ ,[encryptor_thumbprint]
+ ,[encryptor_type])
+ VALUES
+('F3127A6A-5DBE-46D2-867B-43C3D5693584', (SELECT media_set_id FROM dbo.backupmediaset WHERE media_uuid='4B540F0F-80A2-4DDB-AC3F-15A12CC0B582'), 1, 1, 1, 1, 1, 1, 1, NULL, 4608, NULL, NULL, 'GLOBAL\test$', 15, 0, 4102, 4, 0, 153385000004237000000, 153385000004238000000, 153385000004237000000, 153225000010774000000, '11/24/2020 7:24:52.000', '3/24/2021 20:15:51.000', '3/24/2021 20:16:25.000', 'D', 0, 0, 150, 904, 3750102016, 'Data', 'Sample', 'Sample', 528, 1033, 196609, 'Coll', 0, 'FULL', 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, '9319F59E-702A-47E4-83AC-4FBAE0C9D9F7', '9319F59E-702A-47E4-83AC-4FBAE0C9D9F7', NULL, 'A51B2F55-E442-43A9-8DA1-42E72950BDB0', 'C1D08C7F-0D1F-4A92-B3BC-06D932519553', NULL, NULL, 761988426, NULL, NULL, NULL),
+('EB378E11-A86C-45AD-8E44-845BA11EDABC', (SELECT media_set_id FROM dbo.backupmediaset WHERE media_uuid='F67AEB07-082B-4859-80A4-A3BD9A9EA2BE'), 1, 1, 1, 1, 1, 1, 1, NULL, 4608, NULL, NULL, 'GLOBAL\test$', 15, 0, 4102, 4, 0, 154152000006482000000, 154152000006482000000, 154152000006482000000, 153385000004237000000, '11/24/2020 7:24:52.000', '3/26/2021 20:15:08.000', '3/26/2021 20:15:11.000', 'I', 0, 0, 150, 904, 247863296, 'Data', 'Sample', 'Sample', 2576, 1033, 196609, 'Coll', 0, 'FULL', 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, '9319F59E-702A-47E4-83AC-4FBAE0C9D9F7', '9319F59E-702A-47E4-83AC-4FBAE0C9D9F7', NULL, 'A51B2F55-E442-43A9-8DA1-42E72950BDB0', 'C1D08C7F-0D1F-4A92-B3BC-06D932519553', 153385000004237000000, 'F3127A6A-5DBE-46D2-867B-43C3D5693584', 39828859, NULL, NULL, NULL),
+('D3D5DCFD-684F-4501-909F-FBF788516E70', (SELECT media_set_id FROM dbo.backupmediaset WHERE media_uuid='09188E25-3F16-40DF-96A6-F13E8CBBA2CD'), 1, 1, 1, 1, 1, 1, 1, NULL, 4608, NULL, NULL, 'GLOBAL\test$', 15, 0, 4102, 4, 0, 154093000002340000000, 154157000007000000000, 154157000000598000000, 153385000004237000000, '11/24/2020 7:24:52.000', '3/26/2021 20:30:10.000', '3/26/2021 20:30:44.000', 'L', 0, 0, 150, 904, 3961460736, 'Data', 'Sample', 'Sample', 528, 1033, 196609, 'Coll', 0, 'FULL', 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, '9319F59E-702A-47E4-83AC-4FBAE0C9D9F7', '9319F59E-702A-47E4-83AC-4FBAE0C9D9F7', NULL, 'A51B2F55-E442-43A9-8DA1-42E72950BDB0', 'C1D08C7F-0D1F-4A92-B3BC-06D932519553', NULL, NULL, 870083921, NULL, NULL, NULL),
+('501A44F8-77C1-406A-8917-2BEAB711C57E', (SELECT media_set_id FROM dbo.backupmediaset WHERE media_uuid='54BA97B4-3A45-4705-AEDD-774069F072B5'), 1, 1, 1, 1, 1, 1, 1, NULL, 4608, NULL, NULL, 'GLOBAL\test$', 15, 0, 4102, 4, 0, 154157000007000000000, 154187000004732000000, 154187000002440000000, 153385000004237000000, '11/24/2020 7:24:52.000', '3/27/2021 0:30:08.000', '3/27/2021 0:30:21.000', 'L', 0, 0, 150, 904, 1695583232, 'Data', 'Sample', 'Sample', 528, 1033, 196609, 'Coll', 0, 'FULL', 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, '9319F59E-702A-47E4-83AC-4FBAE0C9D9F7', '9319F59E-702A-47E4-83AC-4FBAE0C9D9F7', NULL, 'A51B2F55-E442-43A9-8DA1-42E72950BDB0', 'C1D08C7F-0D1F-4A92-B3BC-06D932519553', NULL, NULL, 363724342, NULL, NULL, NULL),
+('F46FA8A6-591D-4821-9BBB-34137943C9A5', (SELECT media_set_id FROM dbo.backupmediaset WHERE media_uuid='0C6113E2-8517-412F-ABF4-1132F1723CE3'), 1, 1, 1, 1, 1, 1, 1, NULL, 4608, NULL, NULL, 'GLOBAL\test$', 15, 0, 4102, 8, 0, 154157000007000000000, 154176000007131000000, 154176000007123000000, 153385000004237000000, '11/24/2020 7:24:52.000', '3/29/2021 8:30:01.000', '3/29/2021 8:30:12.000', 'L', 0, 0, 150, 904, 986298368, 'Data', 'Sample', 'Sample', 528, 1033, 196609, 'Coll', 0, 'FULL', 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, '9319F59E-702A-47E4-83AC-4FBAE0C9D9F7', 'E5C06515-4559-413E-87A6-64DC928F598B', 154176000007060000000, 'A51B2F55-E442-43A9-8DA1-42E72950BDB0', 'C1D08C7F-0D1F-4A92-B3BC-06D932519553', NULL, NULL, 206666505, NULL, NULL, NULL),
+('03B24566-D8F2-49BE-8ADE-907AB5172415', (SELECT media_set_id FROM dbo.backupmediaset WHERE media_uuid='D79AD6D6-2FE0-4903-AEA5-F0856D4F551C'), 1, 1, 1, 1, 1, 1, 1, NULL, 4608, NULL, NULL, 'GLOBAL\test$', 15, 0, 4102, 8, 0, 154176000007131000000, 154176000007150000000, 154176000007137000000, 154176000007137000000, '11/24/2020 7:24:52.000', '3/29/2021 8:36:32.000', '3/29/2021 8:36:32.000', 'L', 0, 0, 150, 904, 328704, 'Data', 'Sample', 'Sample', 528, 1033, 196609, 'Coll', 0, 'FULL', 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 'E5C06515-4559-413E-87A6-64DC928F598B', 'E5C06515-4559-413E-87A6-64DC928F598B', NULL, 'A51B2F55-E442-43A9-8DA1-42E72950BDB0', 'C1D08C7F-0D1F-4A92-B3BC-06D932519553', NULL, NULL, 105255, NULL, NULL, NULL)
+
+GO
\ No newline at end of file
diff --git a/src/FunctionalTest/Smo/BackupRestore/OlderOutOfSequenceCleanup.sql b/src/FunctionalTest/Smo/BackupRestore/OlderOutOfSequenceCleanup.sql
new file mode 100644
index 00000000..6448ed68
--- /dev/null
+++ b/src/FunctionalTest/Smo/BackupRestore/OlderOutOfSequenceCleanup.sql
@@ -0,0 +1,16 @@
+USE [msdb]
+GO
+
+DECLARE @id1 INT, @id2 INT, @id3 INT, @id4 INT, @id5 INT, @id6 INT
+SET @id1 = (SELECT media_set_id FROM dbo.backupmediaset WHERE media_uuid='84BC7B34-140A-437D-9348-B15B8B6036B4')
+SET @id2 = (SELECT media_set_id FROM dbo.backupmediaset WHERE media_uuid='591E2543-77CD-4710-90EE-EFF499963810')
+SET @id3 = (SELECT media_set_id FROM dbo.backupmediaset WHERE media_uuid='CFE28E39-BECA-4B12-A9D4-71B4CD493B8C')
+SET @id4 = (SELECT media_set_id FROM dbo.backupmediaset WHERE media_uuid='D81AA204-995E-43CF-AE3B-54A2AD95ACEA')
+SET @id5 = (SELECT media_set_id FROM dbo.backupmediaset WHERE media_uuid='422D0DAE-2FED-41C3-8E22-6B4884D70A85')
+
+DELETE FROM [dbo].[backupset] WHERE media_set_id IN (@id1, @id2, @id3, @id4, @id5)
+
+DELETE FROM [dbo].[backupmediafamily] WHERE media_set_id IN (@id1, @id2, @id3, @id4, @id5)
+
+DELETE FROM [dbo].[backupmediaset] WHERE media_set_id IN (@id1, @id2, @id3, @id4, @id5)
+GO
\ No newline at end of file
diff --git a/src/FunctionalTest/Smo/BackupRestore/OlderOutOfSequencePointInTimePlan.sql b/src/FunctionalTest/Smo/BackupRestore/OlderOutOfSequencePointInTimePlan.sql
new file mode 100644
index 00000000..6895d394
--- /dev/null
+++ b/src/FunctionalTest/Smo/BackupRestore/OlderOutOfSequencePointInTimePlan.sql
@@ -0,0 +1,5 @@
+USE [master]
+RESTORE DATABASE [Data] FROM URL = N'https://test.windows.net/sample/FULL/Data_FULL_2021_04_14_201501.bak' WITH FILE = 1, NORECOVERY, NOUNLOAD
+RESTORE DATABASE [Data] FROM URL = N'https://test.windows.net/sample/FULL/Data_DIFF_2021_04_18_201500.bak' WITH FILE = 1, NORECOVERY, NOUNLOAD
+RESTORE LOG [Data] FROM URL = N'https://test.windows.net/sample/FULL/Data_LOG_2021_04_18_203000.trn' WITH FILE = 1, NORECOVERY, NOUNLOAD
+RESTORE LOG [Data] FROM URL = N'https://test.windows.net/sample/FULL/Data_LOG_2021_04_19_003000.trn' WITH FILE = 1, NOUNLOAD, STOPAT = N'2021-04-19T00:15:00'
diff --git a/src/FunctionalTest/Smo/BackupRestore/OlderOutOfSequenceSetup.sql b/src/FunctionalTest/Smo/BackupRestore/OlderOutOfSequenceSetup.sql
new file mode 100644
index 00000000..af6ee15c
--- /dev/null
+++ b/src/FunctionalTest/Smo/BackupRestore/OlderOutOfSequenceSetup.sql
@@ -0,0 +1,114 @@
+USE [msdb]
+GO
+
+INSERT INTO [dbo].[backupmediaset]
+ ([media_uuid]
+ ,[media_family_count]
+ ,[name]
+ ,[description]
+ ,[software_name]
+ ,[software_vendor_id]
+ ,[MTF_major_version]
+ ,[mirror_count]
+ ,[is_password_protected]
+ ,[is_compressed]
+ ,[is_encrypted])
+ VALUES
+('D81AA204-995E-43CF-AE3B-54A2AD95ACEA', 1, NULL, NULL, 'Microsoft SQL Server', 4608, 1, 1, 0, 1, NULL),
+('84BC7B34-140A-437D-9348-B15B8B6036B4', 1, NULL, NULL, 'Microsoft SQL Server', 4608, 1, 1, 0, 1, NULL),
+('591E2543-77CD-4710-90EE-EFF499963810', 1, NULL, NULL, 'Microsoft SQL Server', 4608, 1, 1, 0, 1, NULL),
+('CFE28E39-BECA-4B12-A9D4-71B4CD493B8C', 1, NULL, NULL, 'Microsoft SQL Server', 4608, 1, 1, 0, 1, NULL),
+('422D0DAE-2FED-41C3-8E22-6B4884D70A85', 1, NULL, NULL, 'Microsoft SQL Server', 4608, 1, 1, 0, 1, NULL)
+GO
+
+INSERT INTO [dbo].[backupmediafamily]
+ ([media_set_id]
+ ,[family_sequence_number]
+ ,[media_family_id]
+ ,[media_count]
+ ,[logical_device_name]
+ ,[physical_device_name]
+ ,[device_type]
+ ,[physical_block_size]
+ ,[mirror])
+ VALUES
+((SELECT media_set_id FROM dbo.backupmediaset WHERE media_uuid='D81AA204-995E-43CF-AE3B-54A2AD95ACEA'), 1, 'F236C988-0000-0000-0000-000000000000', 1, NULL, 'https://test.windows.net/sample/FULL/Data_LOG_2021_03_18_125132.trn', 9, 65536, 0),
+((SELECT media_set_id FROM dbo.backupmediaset WHERE media_uuid='84BC7B34-140A-437D-9348-B15B8B6036B4'), 1, '0071B2F1-0000-0000-0000-000000000000', 1, NULL, 'https://test.windows.net/sample/FULL/Data_FULL_2021_04_14_201501.bak', 9, 65536, 0),
+((SELECT media_set_id FROM dbo.backupmediaset WHERE media_uuid='591E2543-77CD-4710-90EE-EFF499963810'), 1, '39AC4DAD-0000-0000-0000-000000000000', 1, NULL, 'https://test.windows.net/sample/FULL/Data_DIFF_2021_04_18_201500.bak', 9, 65536, 0),
+((SELECT media_set_id FROM dbo.backupmediaset WHERE media_uuid='CFE28E39-BECA-4B12-A9D4-71B4CD493B8C'), 1, '3B333306-0000-0000-0000-000000000000', 1, NULL, 'https://test.windows.net/sample/FULL/Data_LOG_2021_04_18_203000.trn', 9, 65536, 0),
+((SELECT media_set_id FROM dbo.backupmediaset WHERE media_uuid='422D0DAE-2FED-41C3-8E22-6B4884D70A85'), 1, '6BF8211C-0000-0000-0000-000000000000', 1, NULL, 'https://test.windows.net/sample/FULL/Data_LOG_2021_04_19_003000.trn', 9, 65536, 0)
+
+GO
+
+INSERT INTO [dbo].[backupset]
+ ([backup_set_uuid]
+ ,[media_set_id]
+ ,[first_family_number]
+ ,[first_media_number]
+ ,[last_family_number]
+ ,[last_media_number]
+ ,[catalog_family_number]
+ ,[catalog_media_number]
+ ,[position]
+ ,[expiration_date]
+ ,[software_vendor_id]
+ ,[name]
+ ,[description]
+ ,[user_name]
+ ,[software_major_version]
+ ,[software_minor_version]
+ ,[software_build_version]
+ ,[time_zone]
+ ,[mtf_minor_version]
+ ,[first_lsn]
+ ,[last_lsn]
+ ,[checkpoint_lsn]
+ ,[database_backup_lsn]
+ ,[database_creation_date]
+ ,[backup_start_date]
+ ,[backup_finish_date]
+ ,[type]
+ ,[sort_order]
+ ,[code_page]
+ ,[compatibility_level]
+ ,[database_version]
+ ,[backup_size]
+ ,[database_name]
+ ,[server_name]
+ ,[machine_name]
+ ,[flags]
+ ,[unicode_locale]
+ ,[unicode_compare_style]
+ ,[collation_name]
+ ,[is_password_protected]
+ ,[recovery_model]
+ ,[has_bulk_logged_data]
+ ,[is_snapshot]
+ ,[is_readonly]
+ ,[is_single_user]
+ ,[has_backup_checksums]
+ ,[is_damaged]
+ ,[begins_log_chain]
+ ,[has_incomplete_metadata]
+ ,[is_force_offline]
+ ,[is_copy_only]
+ ,[first_recovery_fork_guid]
+ ,[last_recovery_fork_guid]
+ ,[fork_point_lsn]
+ ,[database_guid]
+ ,[family_guid]
+ ,[differential_base_lsn]
+ ,[differential_base_guid]
+ ,[compressed_backup_size]
+ ,[key_algorithm]
+ ,[encryptor_thumbprint]
+ ,[encryptor_type])
+ VALUES
+('F6B455CB-CDF4-4A7A-9FD0-80906BB55BB6', (SELECT media_set_id FROM dbo.backupmediaset WHERE media_uuid='D81AA204-995E-43CF-AE3B-54A2AD95ACEA'), 1, 1, 1, 1, 1, 1, 1, NULL, 4608, NULL, NULL, 'GLOBAL\test$', 15, 0, 4102, 4, 0, 323000009849500000, 323000009866500000, 323000009865300000, 323000009865300000, '2/26/2021 10:58:48.000', '3/18/2021 12:51:32.000', '3/18/2021 12:51:32.000', 'L', 0, 0, 150, 904, 282624, 'Data', 'Sample', 'Sample', 528, 1033, 196609, 'Coll', 0, 'FULL', 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, '8FD4DA4B-6CB6-4863-8297-3D41F825D9B6', '8FD4DA4B-6CB6-4863-8297-3D41F825D9B6', NULL, '14759E0B-3EA3-43BE-83D3-9B2F4D5E3170', '3F607B2A-6A17-4CA4-BBCB-2CF67FCCA491', NULL, NULL, 91546, NULL, NULL, NULL),
+('A713999E-4089-4AC7-B353-160B09B72632', (SELECT media_set_id FROM dbo.backupmediaset WHERE media_uuid='84BC7B34-140A-437D-9348-B15B8B6036B4'), 1, 1, 1, 1, 1, 1, 1, NULL, 4608, NULL, NULL, 'GLOBAL\test$', 15, 0, 4102, 8, 0, 284000004313500000, 289000004659700000, 289000004658900000, 250000005055600000, '3/18/2021 15:06:04.000', '4/14/2021 20:15:01.000', '4/14/2021 20:15:50.000', 'D', 0, 0, 150, 904, 2286685184, 'Data', 'Sample', 'Sample', 528, 1033, 196609, 'Coll', 0, 'FULL', 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, '857B55F7-2E1A-43E8-84C1-848D68D52EF9', '857B55F7-2E1A-43E8-84C1-848D68D52EF9', NULL, '355F59DF-59A9-4CA6-8269-22299704E340', '68C4BBF2-3AC5-4535-BEE1-95CEA76FE4C3', NULL, NULL, 591167805, NULL, NULL, NULL),
+('2DF60F1A-55FF-48D8-A971-EFEFB7F7E434', (SELECT media_set_id FROM dbo.backupmediaset WHERE media_uuid='591E2543-77CD-4710-90EE-EFF499963810'), 1, 1, 1, 1, 1, 1, 1, NULL, 4608, NULL, NULL, 'GLOBAL\test$', 15, 0, 4102, 8, 0, 315000007756600000, 322000000237600000, 322000000236800000, 289000004658900000, '3/18/2021 15:06:04.000', '4/18/2021 20:15:00.000', '4/18/2021 20:15:02.000', 'I', 0, 0, 150, 904, 399729664, 'Data', 'Sample', 'Sample', 2576, 1033, 196609, 'Coll', 0, 'FULL', 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, '857B55F7-2E1A-43E8-84C1-848D68D52EF9', '857B55F7-2E1A-43E8-84C1-848D68D52EF9', NULL, '355F59DF-59A9-4CA6-8269-22299704E340', '68C4BBF2-3AC5-4535-BEE1-95CEA76FE4C3', 289000004658900000, 'A713999E-4089-4AC7-B353-160B09B72632', 40681515, NULL, NULL, NULL),
+('1B270B12-8013-42F5-A2EA-C603C5D5B866', (SELECT media_set_id FROM dbo.backupmediaset WHERE media_uuid='CFE28E39-BECA-4B12-A9D4-71B4CD493B8C'), 1, 1, 1, 1, 1, 1, 1, NULL, 4608, NULL, NULL, 'GLOBAL\test$', 15, 0, 4102, 8, 0, 320000000231800000, 322000000252300000, 322000000236800000, 289000004658900000, '3/18/2021 15:06:04.000', '4/18/2021 20:30:00.000', '4/18/2021 20:30:00.000', 'L', 0, 0, 150, 904, 67401728, 'Data', 'Sample', 'Sample', 528, 1033, 196609, 'Coll', 0, 'FULL', 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, '857B55F7-2E1A-43E8-84C1-848D68D52EF9', '857B55F7-2E1A-43E8-84C1-848D68D52EF9', NULL, '355F59DF-59A9-4CA6-8269-22299704E340', '68C4BBF2-3AC5-4535-BEE1-95CEA76FE4C3', NULL, NULL, 1118579, NULL, NULL, NULL),
+('338BEACA-31A6-4C11-892A-4AE1CA137490', (SELECT media_set_id FROM dbo.backupmediaset WHERE media_uuid='422D0DAE-2FED-41C3-8E22-6B4884D70A85'), 1, 1, 1, 1, 1, 1, 1, NULL, 4608, NULL, NULL, 'GLOBAL\test$', 15, 0, 4102, 8, 0, 322000000252300000, 324000000242400000, 324000000004300000, 289000004658900000, '3/18/2021 15:06:04.000', '4/19/2021 0:30:00.000', '4/19/2021 0:30:00.000', 'L', 0, 0, 150, 904, 66746368, 'Data', 'Sample', 'Sample', 528, 1033, 196609, 'Coll', 0, 'FULL', 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, '857B55F7-2E1A-43E8-84C1-848D68D52EF9', '857B55F7-2E1A-43E8-84C1-848D68D52EF9', NULL, '355F59DF-59A9-4CA6-8269-22299704E340', '68C4BBF2-3AC5-4535-BEE1-95CEA76FE4C3', NULL, NULL, 850827, NULL, NULL, NULL)
+
+
+GO
\ No newline at end of file
diff --git a/src/FunctionalTest/Smo/DMF/ExecutionHistoryTests.cs b/src/FunctionalTest/Smo/DMF/ExecutionHistoryTests.cs
new file mode 100644
index 00000000..cfb71bdd
--- /dev/null
+++ b/src/FunctionalTest/Smo/DMF/ExecutionHistoryTests.cs
@@ -0,0 +1,376 @@
+// Copyright (c) Microsoft.
+// Licensed under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.Data;
+#if MICROSOFTDATA
+using Microsoft.Data.SqlClient;
+#else
+using System.Data.SqlClient;
+#endif
+using System.Linq;
+using System.Text;
+using System.Xml;
+using System.Xml.XPath;
+using Microsoft.SqlServer.Management.Common;
+using Microsoft.SqlServer.Management.Dmf;
+using Microsoft.SqlServer.Management.Sdk.Sfc;
+using Microsoft.SqlServer.Management.Smo;
+using Microsoft.SqlServer.Test.Manageability.Utils;
+using Microsoft.SqlServer.Test.Manageability.Utils.Helpers;
+using TraceHelper = Microsoft.SqlServer.Test.Manageability.Utils.Helpers.TraceHelper;
+using Microsoft.SqlServer.Test.Manageability.Utils.TestFramework;
+using Microsoft.VisualStudio.TestTools.UnitTesting;
+using NUnit.Framework;
+using Assert = NUnit.Framework.Assert;
+
+namespace Microsoft.SqlServer.Test.SMO.DMF
+{
+ /// <summary>
+ /// Porting DMV Execution History tests from ds_main.
+ /// Merged this class with ExecutionHistoryTests to avoid race conditions during parallel test execution.
+ /// </summary>
+ public partial class PolicyTests : SqlTestBase
+ {
+ /// <summary>
+ /// Verifies that when the LogOnSuccess config option is set to true a successful
+ /// policy evaluation results in a single entry in the EvaluationHistories table.
+ /// </summary>
+ [TestMethod]
+ [SupportedServerVersionRange(DatabaseEngineType = DatabaseEngineType.Standalone, MinMajor = 10)]
+
+ public void When_LogOnSuccess_is_true_Policy_EvaluationHistories_match_server_data()
+ {
+ ExecuteWithDbDrop(db =>
+ {
+ Policy policy = null;
+ Condition condition = null;
+ ObjectSet objectSet = null;
+ TraceHelper.TraceInformation("Setting LogOnSuccess to true");
+ var configValue =
+ (int)
+ ServerContext.ConnectionContext.ExecuteScalar(
+ "select current_value from msdb.dbo.syspolicy_configuration where name='LogOnSuccess'");
+ ServerContext.ConnectionContext.ExecuteNonQuery(
+ "exec msdb.dbo.sp_syspolicy_configure @name=N'LogOnSuccess', @value=1");
+ try
+ {
+
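+ // The condition is true only for the test database, so evaluating the Server/Database
+ // target set yields exactly one successful evaluation to look for in the history.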
+ var policyStore = new PolicyStore(new SqlStoreConnection(
+ new SqlConnection(this.SqlConnectionStringBuilder.ConnectionString)));
+ var expr = new ExpressionNodeOperator(OperatorType.EQ, new ExpressionNodeAttribute("Name"),
+ ExpressionNode.ConstructNode(db.Name));
+ condition = policyStore.CreateCondition(typeof (Database).Name, expr);
+ objectSet = policyStore.CreateObjectSet(
+ facet: "Database",
+ targetSetsAndLevelConditions: new Dictionary<string, IEnumerable<Tuple<string, string>>>()
+ {
+ {
+ // No filter because we want a mix of success and failure
+ "Server/Database",
+ new [] {
+ new Tuple("Server/Database", string.Empty),
+ }
+ }},
+ objectSetNamePrefix: this.TestMethod.Name);
+
+ policy = policyStore.CreatePolicy(condition.Name, AutomatedPolicyEvaluationMode.None, objectSet.Name);
+
+ TraceHelper.TraceInformation("Evaluating Policy {0} in Check mode", policy.Name);
+ policy.Evaluate(AdHocPolicyEvaluationMode.Check, policyStore.SqlStoreConnection);
+ TraceHelper.TraceInformation("Evaluating Policy {0} in Configure mode", policy.Name);
+ policy.Evaluate(AdHocPolicyEvaluationMode.Configure, policyStore.SqlStoreConnection);
+ VerifyExecutionHistory(policy);
+ Assert.That(
+ policy.EvaluationHistories.SelectMany(eh => eh.ConnectionEvaluationHistories)
+ .SelectMany(c => c.EvaluationDetails)
+ .Select(ed => ed.Result), Has.Exactly(1).True, "Expected 1 evaluation successful match");
+ foreach (var evaluationHistory in policy.EvaluationHistories)
+ {
+ VerifyHistoryDetail(policy, evaluationHistory.ID);
+ }
+ }
+ finally
+ {
+ try
+ {
+ TraceHelper.TraceInformation("Setting LogOnSuccess to " + configValue);
+ ServerContext.ConnectionContext.ExecuteNonQuery(
+ "exec msdb.dbo.sp_syspolicy_configure @name=N'LogOnSuccess', @value=" + configValue);
+ }
+ catch (Exception e)
+ {
+ TraceHelper.TraceInformation("Unable to restore LogOnSuccess config value: " + e);
+ }
+ SmoObjectHelpers.SafeDrop(policy, condition, objectSet);
+ }
+ });
+ }
+
+ /// <summary>
+ /// Verifies that when a policy fails, a well-formed XML document is written by the
+ /// PolicyEvaluationResultsWriter and contains the correct content.
+ /// </summary>
+ [TestMethod]
+ [SupportedServerVersionRange(DatabaseEngineType = DatabaseEngineType.Standalone, MinMajor = 10)]
+
+ public void When_Evaluate_fails_PolicyEvaluationResultsWriter_generates_correct_xml()
+ {
+ ExecuteTest(() =>
+ {
+ Policy policy = null;
+ Condition condition = null;
+ ObjectSet objectSet = null;
+ try
+ {
+ var xpCmdShellEnabled = (1 == ServerContext.Configuration.XPCmdShellEnabled.RunValue);
+ var policyStore = new PolicyStore(new SqlStoreConnection(
+ new SqlConnection(this.SqlConnectionStringBuilder.ConnectionString)));
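+ // The condition asserts XPCmdShellEnabled != its current value, so evaluation is guaranteed
+ // to fail and produce a history entry that can be serialized and inspected.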
+ var expr = new ExpressionNodeOperator(OperatorType.NE, new ExpressionNodeAttribute("XPCmdShellEnabled"),
+ ExpressionNode.ConstructNode(xpCmdShellEnabled));
+ condition = policyStore.CreateCondition(typeof (ISurfaceAreaFacet).Name, expr);
+ objectSet = policyStore.CreateObjectSet(
+ facet: "ISurfaceAreaFacet",
+ targetSetsAndLevelConditions:null,
+ // object set names have 128 char max
+ objectSetNamePrefix: this.TestMethod.Name.Substring(20));
+
+ policy = policyStore.CreatePolicy(condition.Name, AutomatedPolicyEvaluationMode.None, objectSet.Name);
+
+ TraceHelper.TraceInformation("Evaluating Policy {0} in Check mode", policy.Name);
+ var result = policy.Evaluate(AdHocPolicyEvaluationMode.Check, policyStore.SqlStoreConnection);
+ Assert.That(result, Is.False, "Evaluate XPCmdShellEnabled != {0} should return false", xpCmdShellEnabled);
+ Assert.That(policy.EvaluationHistories.Count, Is.EqualTo(1), "Only one evaluation expected");
+ var stringBuilder = new StringBuilder();
+ var xmlWriter = XmlTextWriter.Create(stringBuilder,
+ PolicyEvaluationResultsWriter.GetXmlWriterSettings());
+ using (var resultsWriter = new PolicyEvaluationResultsWriter(xmlWriter))
+ {
+ foreach (EvaluationHistory evaluationHistory in policy.EvaluationHistories)
+ {
+ resultsWriter.WriteEvaluationHistory(evaluationHistory);
+ }
+ }
+ xmlWriter.Flush();
+ TraceHelper.TraceInformation("Serialized history: " + stringBuilder);
+ XmlDocument document = new XmlDocument();
+ document.LoadXml(stringBuilder.ToString());
+ XmlNamespaceManager nsManager = SfcXmlHelper.GetXmlNsManager(document);
+
+ XmlHelper.SelectFirstAndOnlyNode("/PolicyEvaluationResults", document, nsManager);
+
+ XmlHelper.SelectFirstAndOnlyNode("//dmf:EvaluationHistory", document, nsManager);
+ XmlHelper.SelectFirstAndOnlyNode("//dmf:EvaluationHistory/dmf:StartDate", document, nsManager);
+ XmlHelper.SelectFirstAndOnlyNode("//dmf:EvaluationHistory/dmf:EndDate", document, nsManager);
+
+ XPathNavigator selection = XmlHelper.SelectFirstAndOnlyNode("//dmf:EvaluationHistory/dmf:Exception", document, nsManager);
+ Assert.That(selection.Value, Is.Null.Or.Empty, "The history reports an exception, but there should not be one.");
+
+ selection = XmlHelper.SelectFirstAndOnlyNode("//dmf:EvaluationHistory/dmf:Result", document, nsManager);
+ Assert.That(selection.ValueAs(typeof(bool)), Is.EqualTo(result), "The history reports a different result than the result from the evaluation.");
+
+ XmlHelper.SelectFirstAndOnlyNode("//dmf:ConnectionEvaluationHistory", document, nsManager);
+
+ selection = XmlHelper.SelectFirstAndOnlyNode("//dmf:ConnectionEvaluationHistory/dmf:Exception", document, nsManager);
+ Assert.That(selection.Value, Is.Null.Or.Empty, "The connection history reports an exception, but there should not be one.");
+
+ selection = XmlHelper.SelectFirstAndOnlyNode("//dmf:ConnectionEvaluationHistory/dmf:Result", document, nsManager);
+ Assert.That(selection.ValueAs(typeof(bool)), Is.EqualTo(result), "The connection history reports a different result than the result from the evaluation.");
+
+ selection = XmlHelper.SelectFirstAndOnlyNode("//dmf:ConnectionEvaluationHistory/dmf:ServerInstance", document, nsManager);
+ Assert.That(selection.Value, Does.Contain(ServerContext.ConnectionContext.TrueName).IgnoreCase, "The connection history reports an incorrect server name");
+
+ XmlHelper.SelectFirstAndOnlyNode("//dmf:EvaluationDetail", document, nsManager);
+ XmlHelper.SelectFirstAndOnlyNode("//dmf:EvaluationDetail/dmf:EvaluationDate", document, nsManager);
+
+ selection = XmlHelper.SelectFirstAndOnlyNode("//dmf:EvaluationDetail/dmf:Exception", document, nsManager);
+ Assert.That(selection.Value, Is.Null.Or.Empty, "The detail reports an exception, but there should not be one.");
+
+ selection = XmlHelper.SelectFirstAndOnlyNode("//dmf:EvaluationDetail/dmf:ResultDetail", document, nsManager);
+ Assert.That(selection.Value, Is.Not.Null.And.Not.Empty, "ResultDetail empty");
+
+ selection = XmlHelper.SelectFirstAndOnlyNode("//dmf:EvaluationDetail/dmf:TargetQueryExpression", document, nsManager);
+ Assert.That(selection.Value, Is.Not.Null.And.Not.Empty, "TargetQueryExpression empty");
+
+ selection = XmlHelper.SelectFirstAndOnlyNode("//dmf:EvaluationDetail/dmf:Result", document, nsManager);
+ Assert.That(selection.ValueAs(typeof(bool)), Is.EqualTo(result), "The detail reports a different result than the result from the evaluation.");
+ }
+ finally
+ {
+ SmoObjectHelpers.SafeDrop(policy, condition, objectSet);
+ }
+ });
+ }
+
+ ///
+ /// Verifies that when a condition has an OR operator the ConnectionEvaluationHistories
+ /// contain multiple evaluation detail records
+ ///
+ [TestMethod]
+ [SupportedServerVersionRange(DatabaseEngineType = DatabaseEngineType.Standalone, MinMajor = 10)]
+
+ public void When_Condition_has_OR_operator_ConnectionEvaluationHistories_contains_multiple_details()
+ {
+ ExecuteWithDbDrop((db) =>
+ {
+ Policy policy = null;
+ Condition condition = null;
+ ObjectSet objectSet = null;
+ Condition dbNameCondition = null;
+ try
+ {
+ var policyStore = new PolicyStore(new SqlStoreConnection(
+ new SqlConnection(this.SqlConnectionStringBuilder.ConnectionString)));
+ ExpressionNode expr1 = new ExpressionNodeOperator(OperatorType.LIKE, new ExpressionNodeAttribute("Name"), new ExpressionNodeConstant("Temp%"));
+ ExpressionNode expr2 = new ExpressionNodeOperator(OperatorType.EQ, new ExpressionNodeAttribute("IsFixedRole"), new ExpressionNodeFunction(ExpressionNodeFunction.Function.False));
+ ExpressionNode expr = new ExpressionNodeOperator(OperatorType.OR, expr1, expr2);
+ //Create the condition we'll use to filter out the database name for the object set
+ dbNameCondition = policyStore.CreateCondition(
+ facet: "Database",
+ expressionNodeExpression: string.Format("@Name = '{0}'", Urn.EscapeString(db.Name)),
+ conditionNamePrefix: "Database_Name_Condition");
+ condition = policyStore.CreateCondition("DatabaseRole", expr);
+ objectSet = policyStore.CreateObjectSet(
+ facet: "DatabaseRole",
+ targetSetsAndLevelConditions: new Dictionary<string, IEnumerable<Tuple<string, string>>>()
+ {
+ {
+ "Server/Database/Role",
+ new[]
+ {
+ new Tuple<string, string>("Server/Database", dbNameCondition.Name),
+ new Tuple<string, string>("Server/Database/Role", string.Empty),
+ }
+ }
+ },
+ // object set names have 128 char max
+ objectSetNamePrefix: this.TestMethod.Name.Substring(20));
+
+ policy = policyStore.CreatePolicy(condition.Name, AutomatedPolicyEvaluationMode.None, objectSet.Name);
+
+ TraceHelper.TraceInformation("Evaluating Policy {0} in Check mode", policy.Name);
+ var result = policy.Evaluate(AdHocPolicyEvaluationMode.Check, policyStore.SqlStoreConnection);
+ Assert.That(result, Is.False, "Policy should evaluate False");
+ Assert.That(policy.EvaluationHistories.Count, Is.EqualTo(1), "One evaluation history expected");
+ var detailsCount =
+ policy.EvaluationHistories.SelectMany(eh => eh.ConnectionEvaluationHistories)
+ .Sum(ceh => ceh.EvaluationDetails.Count);
+ TraceHelper.TraceInformation("Number of detail records:" + detailsCount);
+ Assert.That(detailsCount, Is.AtLeast(2), "History detail records expected");
+ }
+ finally
+ {
+ SmoObjectHelpers.SafeDrop(policy, condition, objectSet, dbNameCondition);
+ }
+ });
+ }
+
+ ///
+ /// Verifies that when a database is offline, evaluating a policy that checks
+ /// DB names completes successfully and does not record any exceptions.
+ ///
+ [TestMethod]
+ [SupportedServerVersionRange(DatabaseEngineType = DatabaseEngineType.Standalone, MinMajor = 10)]
+ [UnsupportedDatabaseEngineEdition(DatabaseEngineEdition.SqlManagedInstance)]
+ public void When_Database_is_offline_evaluate_does_not_record_an_exception()
+ {
+ ExecuteWithDbDrop(db =>
+ {
+ Policy policy = null;
+ Condition condition = null;
+ ObjectSet objectSet = null;
+ Condition dbNameCondition = null;
+ try
+ {
+ db.SetOffline();
+ var policyStore = new PolicyStore(new SqlStoreConnection(
+ new SqlConnection(this.SqlConnectionStringBuilder.ConnectionString)));
+ var expr = new ExpressionNodeOperator(OperatorType.EQ, new ExpressionNodeAttribute("Schema"),
+ ExpressionNode.ConstructNode("dbo"));
+ //Create the condition we'll use to filter out the database name for the object set
+ dbNameCondition = policyStore.CreateCondition(
+ facet: "Database",
+ expressionNodeExpression: string.Format("@Name = '{0}'", Urn.EscapeString(db.Name)),
+ conditionNamePrefix: "Database_Name_Condition");
+ condition = policyStore.CreateCondition("Table", expr);
+ objectSet = policyStore.CreateObjectSet(
+ facet: "Table",
+ targetSetsAndLevelConditions: new Dictionary<string, IEnumerable<Tuple<string, string>>>()
+ {
+ {
+ "Server/Database/Table",
+ new[]
+ {
+ new Tuple<string, string>("Server/Database", dbNameCondition.Name),
+ new Tuple<string, string>("Server/Database/Table", string.Empty),
+ }
+ }
+ },
+ objectSetNamePrefix: this.TestMethod.Name);
+
+ policy = policyStore.CreatePolicy(condition.Name, AutomatedPolicyEvaluationMode.None, objectSet.Name);
+
+ TraceHelper.TraceInformation("Evaluating Policy {0} in Check mode", policy.Name);
+ // return value doesn't matter
+ policy.Evaluate(AdHocPolicyEvaluationMode.Check, policyStore.SqlStoreConnection);
+ Assert.That(policy.EvaluationHistories.Count, Is.EqualTo(1), "One evaluation history expected");
+ Assert.That(
+ policy.EvaluationHistories.SelectMany(eh => eh.ConnectionEvaluationHistories)
+ .SelectMany(ceh => ceh.EvaluationDetails)
+ .Select(d => d.Exception), Has.All.Null.Or.Empty, "No exception expected");
+ }
+ finally
+ {
+ SmoObjectHelpers.SafeDrop(policy, condition, objectSet, dbNameCondition);
+ }
+ });
+ }
+
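+ ///
+ /// Cross-checks Policy.EvaluationHistories against the rows in
+ /// msdb.dbo.syspolicy_policy_execution_history for the given policy.
+ /// The default expectedCount of 2 matches the Check + Configure evaluations performed above.
+ ///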
+ private void VerifyExecutionHistory(Policy policy, int expectedCount = 2)
+ {
+ TraceHelper.TraceInformation("Comparing SMO Policy.EvaluationHistories with query output");
+ var dataSet = ServerContext.ExecutionManager.ConnectionContext.ExecuteWithResults(
+ "SELECT history_id AS ID, start_date AS StartDate, end_date AS EndDate, result AS Result, exception AS Exception FROM msdb.dbo.syspolicy_policy_execution_history WHERE policy_id=" +
+ policy.ID);
+ var table = dataSet.Tables[0];
+ var rows = table.Rows.OfType<DataRow>().ToList();
+ Assert.That(rows.Count, Is.EqualTo(expectedCount), "Unexpected number of history entries");
+ var histories = policy.EvaluationHistories;
+ Assert.That(histories.Select(h => h.ID), Is.EquivalentTo(rows.Select(r => (long) r.ItemArray[0])),
+ "IDs don't match");
+ Assert.That(histories.Select(h => h.StartDate), Is.EquivalentTo(rows.Select(r => r.ItemArray[1])),
+ "Start dates don't match");
+ Assert.That(histories.Select(h => h.EndDate), Is.EquivalentTo(rows.Select(r => r.ItemArray[2])),
+ "End dates don't match");
+ Assert.That(histories.Select(h => h.Result), Is.EquivalentTo(rows.Select(r => r.ItemArray[3])),
+ "Results don't match");
+ Assert.That(histories.Select(h => h.Exception), Is.EquivalentTo(rows.Select(r => r.ItemArray[4])),
+ "Exceptions don't match");
+ }
+
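+ ///
+ /// Cross-checks ConnectionEvaluationHistory.EvaluationDetails against the rows in
+ /// msdb.dbo.syspolicy_policy_execution_history_details for the given history id.
+ ///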
+ private void VerifyHistoryDetail(Policy policy, long historyId)
+ {
+ TraceHelper.TraceInformation("Comparing SMO ConnectionEvaluationHistory.EvaluationDetails with query output for history id {0}", historyId);
+ var dataSet = ServerContext.ExecutionManager.ConnectionContext.ExecuteWithResults(
+ "select detail_id as ID, target_query_expression as TargetQueryExpression, execution_date as ExecutionDate, result as Result, result_detail from msdb.dbo.syspolicy_policy_execution_history_details where history_id=" +
+ historyId);
+ var table = dataSet.Tables[0];
+ var rows = table.Rows.OfType<DataRow>().ToList();
+ TraceHelper.TraceInformation("Expecting {0} rows", rows.Count);
+ var details =
+ policy.EvaluationHistories[historyId].ConnectionEvaluationHistories.SelectMany(c => c.EvaluationDetails)
+ .ToList();
+ Assert.That(details.Select(d => d.ID), Is.EquivalentTo(rows.Select(r => (long) r.ItemArray[0])),
+ "IDs don't match");
+ Assert.That(details.Select(d => d.TargetQueryExpression), Is.EquivalentTo(rows.Select(r => r.ItemArray[1])),
+ "TargetQueryExpression doesn't match");
+ Assert.That(details.Select(d => d.EvaluationDate), Is.EquivalentTo(rows.Select(r => r.ItemArray[2])),
+ "EvaluationDate doesn't match");
+ Assert.That(details.Select(d => d.Result), Is.EquivalentTo(rows.Select(r => r.ItemArray[3])),
+ "Result doesn't match");
+ Assert.That(details.Select(d => d.ResultDetail), Is.EquivalentTo(rows.Select(r => r.ItemArray[4])),
+ "ResultDetail doesn't match");
+ }
+ }
+}
diff --git a/src/FunctionalTest/Smo/DMF/PolicyTests.cs b/src/FunctionalTest/Smo/DMF/PolicyTests.cs
new file mode 100644
index 00000000..b1395da4
--- /dev/null
+++ b/src/FunctionalTest/Smo/DMF/PolicyTests.cs
@@ -0,0 +1,420 @@
+// Copyright (c) Microsoft.
+// Licensed under the MIT license.
+
+using System;
+using System.Collections.Generic;
+#if MICROSOFTDATA
+using Microsoft.Data.SqlClient;
+#else
+using System.Data.SqlClient;
+#endif
+using System.Diagnostics;
+using System.Linq;
+using Microsoft.SqlServer.Management.Common;
+using Microsoft.SqlServer.Management.Dmf;
+using Microsoft.SqlServer.Management.Facets;
+using Microsoft.SqlServer.Management.Sdk.Sfc;
+using Microsoft.SqlServer.Management.Smo;
+using Microsoft.SqlServer.Test.Manageability.Utils;
+using TraceHelper = Microsoft.SqlServer.Test.Manageability.Utils.Helpers.TraceHelper;
+using Microsoft.SqlServer.Test.Manageability.Utils.TestFramework;
+using Microsoft.VisualStudio.TestTools.UnitTesting;
+using NUnit.Framework;
+using Assert = NUnit.Framework.Assert;
+using System.Data;
+using System.Xml;
+
+namespace Microsoft.SqlServer.Test.SMO.DMF
+{
+ ///
+ /// Tests for DMF Policies. Since we're unlikely to hotfix PBM/SMO for SqlClr on old versions
+ /// of SQL, we're only testing against v12+
+ ///
+ [TestClass]
+ [UnsupportedDatabaseEngineEdition(DatabaseEngineEdition.SqlOnDemand)]
+ public partial class PolicyTests : SqlTestBase
+ {
+ ///
+ /// Tests that we can successfully create a Policy for a table, evaluate it, and
+ /// have it pass.
+ ///
+ [TestMethod]
+ [SupportedServerVersionRange(DatabaseEngineType = DatabaseEngineType.Standalone, MinMajor = 16)]
+ public void Policy_CanCreateAndExecuteTablePolicy()
+ {
+ this.ExecuteWithDbDrop(
+ database =>
+ {
+ Condition policyCondition = null, tableNameCondition = null, databaseNameCondition = null;
+ ObjectSet objectSet = null;
+ Policy policy = null;
+ try
+ {
+ var table = database.CreateTable(this.TestMethod.Name);
+
+ var policyStore = new PolicyStore(
+ new SqlStoreConnection(
+ new SqlConnection(this.SqlConnectionStringBuilder.ConnectionString)));
+
+ //Create the condition that the Policy is going to use
+ policyCondition = policyStore.CreateCondition(
+ facet: "Table",
+ expressionNodeExpression: "@IsSystemObject = False()",
+ conditionNamePrefix: "Table_IsSystemObject_Condition");
+
+ //Create the condition we'll use to filter out the table name for the object set
+ tableNameCondition = policyStore.CreateCondition(
+ facet: "Table",
+ expressionNodeExpression: string.Format("@Name = '{0}'", Urn.EscapeString(table.Name)),
+ conditionNamePrefix: "Table_Name_Condition");
+
+ //Create the condition we'll use to filter out the database name for the object set
+ databaseNameCondition = policyStore.CreateCondition(
+ facet: "Database",
+ expressionNodeExpression: string.Format("@Name = '{0}'", Urn.EscapeString(database.Name)),
+ conditionNamePrefix: "Database_Name_Condition");
+
+ //Create the object set so that we only run this policy against the specific table
+ //created for this test (evaluating objects outside the control of the test makes it
+ //difficult to determine a consistent outcome)
+ objectSet = policyStore.CreateObjectSet(
+ facet: "Table",
+ targetSetsAndLevelConditions: new Dictionary<string, IEnumerable<Tuple<string, string>>>()
+ {
+ {
+ "Server/Database/Table",
+ new [] {
+ new Tuple<string, string>("Server/Database", databaseNameCondition.Name),
+ //Only evaluate on Tables with the specific name of the one we created
+ new Tuple<string, string>("Server/Database/Table", tableNameCondition.Name),
+ }
+ } },
+ objectSetNamePrefix: this.TestMethod.Name);
+
+ policy = policyStore.CreatePolicy(
+ condition: policyCondition.Name,
+ policyEvaluationMode: AutomatedPolicyEvaluationMode.None,
+ objectSet: objectSet.Name,
+ policyNamePrefix: this.TestMethod.Name);
+
+ TraceHelper.TraceInformation("Evaluating Policy {0} in PolicyStore {1}", policy.Name, policyStore.Name);
+ var result = policy.Evaluate(AdHocPolicyEvaluationMode.Check, policyStore.SqlStoreConnection);
+ var policies = policyStore.EnumApplicablePolicies(new SfcQueryExpression(table.Urn.Value));
+ var facetPolicies = policyStore.EnumPoliciesOnFacet(nameof(Table), PolicyStore.EnumerationMode.All);
+ var facetConditions = policyStore.EnumConditionsOnFacet(nameof(Table), PolicyStore.EnumerationMode.All);
+ var targetConditions = policyStore.EnumTargetSetConditions(typeof(Database), PolicyStore.EnumerationMode.All);
+ Assert.Multiple(() =>
+ {
+ Assert.That(policy.EvaluationHistories.Count, Is.EqualTo(1), "Should have 1 evaluation history");
+ Assert.That(result, Is.True, "The Policy should have evaluated to true");
+ Assert.That(policies.Rows.Cast<DataRow>().Select(r => r[nameof(Policy.Name)]), Has.Member(policy.Name), "EnumApplicablePolicies should include the policy");
+ Assert.That(facetPolicies.Cast<string>(), Has.Member(policy.Name), "EnumFacetPolicies should include the policy");
+ Assert.That(facetConditions.Cast<string>(), Has.Member(tableNameCondition.Name), "EnumFacetConditions should include the condition");
+ Assert.That(targetConditions.Cast<string>(), Has.Member(databaseNameCondition.Name), "EnumTargetSetConditions should include the condition");
+ Assert.That(policy.IsSystemObject, Is.False, "policy.IsSystemObject");
+ });
+ policyStore.MarkSystemObject(policy, marker: true);
+ policyStore.MarkSystemObject(databaseNameCondition, marker: true);
+ policyStore.MarkSystemObject(objectSet, marker: true);
+ databaseNameCondition.Refresh();
+ objectSet.Refresh();
+ policyStore.Policies.Refresh(refreshChildObjects: true);
+ var nonSystemConditions = policyStore.EnumTargetSetConditions(typeof(Database), PolicyStore.EnumerationMode.NonSystemOnly);
+ var systemConditions = policyStore.EnumTargetSetConditions(typeof(Database), PolicyStore.EnumerationMode.SystemOnly);
+ facetPolicies = policyStore.EnumPoliciesOnFacet(nameof(Table), PolicyStore.EnumerationMode.SystemOnly);
+ facetConditions = policyStore.EnumConditionsOnFacet(nameof(Table), PolicyStore.EnumerationMode.NonSystemOnly);
+ Assert.Multiple(() =>
+ {
+ Assert.That(policy.IsSystemObject, Is.True, "policy.IsSystemObject after MarkSystemObject");
+ Assert.That(databaseNameCondition.IsSystemObject, Is.True, "condition.IsSystemObject after MarkSystemObject");
+ Assert.That(objectSet.IsSystemObject, Is.True, "objectSet.IsSystemObject after MarkSystemObject");
+ Assert.That(systemConditions.Cast<string>(), Has.Member(databaseNameCondition.Name), "system Conditions should include the condition");
+ Assert.That(nonSystemConditions.Cast<string>(), Has.No.Member(databaseNameCondition.Name), "nonsystem Conditions should exclude the condition");
+ Assert.That(facetPolicies.Cast<string>(), Has.Member(policy.Name), "EnumFacetPolicies SystemOnly should include the policy");
+ Assert.That(facetConditions.Cast<string>(), Has.Member(tableNameCondition.Name), "EnumFacetConditions NonSystemOnly should include the condition");
+ });
+ }
+ finally
+ {
+ //Clean up test objects, note the order is important since the
+ //policy uses the object set and a condition and the object set
+ //uses the conditions
+ SmoObjectHelpers.SafeDrop(policy, objectSet, policyCondition, tableNameCondition, databaseNameCondition);
+ }
+
+ });
+
+ }
+
+ ///
+ /// SMO versions on server 2016 and prior have a defect handling database names with quotes
+ /// in this scenario, so we only test on 2017 and newer (see bug 9731281)
+ ///
+ [TestMethod]
+ [SupportedServerVersionRange(DatabaseEngineType = DatabaseEngineType.Standalone, MinMajor = 14, MaxMajor = 14, HostPlatform = "Windows")]
+ [UnsupportedDatabaseEngineEdition(DatabaseEngineEdition.Express)]
+ public void Server_blocks_sproc_creation_based_on_policy_140()
+ {
+ Test_creation_policy_impl();
+ }
+
+ ///
+ /// Regression test for 12143605
+ ///
+ [TestMethod]
+ [SupportedServerVersionRange(DatabaseEngineType = DatabaseEngineType.Standalone, MinMajor = 15, HostPlatform = "Windows")]
+ [UnsupportedDatabaseEngineEdition(DatabaseEngineEdition.Express)]
+ public void Server_blocks_sproc_creation_based_on_policy_150_plus()
+ {
+ Test_creation_policy_impl();
+ }
+
+ private void Test_creation_policy_impl()
+ {
+ Condition policyCondition = null, databaseNameCondition = null;
+ ObjectSet objectSet = null;
+ Policy policy = null;
+ ExecuteWithDbDrop((db) =>
+ {
+
+ var policyStore = new PolicyStore(
+ new SqlStoreConnection(
+ new SqlConnection(this.SqlConnectionStringBuilder.ConnectionString)));
+ try
+ {
+ // Condition for sprocs with name not starting with sp
+ policyCondition = policyStore.CreateCondition(
+ facet: "StoredProcedure",
+ expressionNodeExpression: "@Name != 'spBlock'",
+ conditionNamePrefix: "spNameCondition");
+
+ //Create the condition we'll use to filter out the database name for the object set
+ databaseNameCondition = policyStore.CreateCondition(
+ facet: "Database",
+ expressionNodeExpression: string.Format("@Name = '{0}'", Urn.EscapeString(db.Name)),
+ conditionNamePrefix: "Database_Name_Condition");
+
+ // scope evaluation to just our database
+ objectSet = policyStore.CreateObjectSet(
+ facet: "StoredProcedure",
+ targetSetsAndLevelConditions: new Dictionary>>()
+ {
+ {
+ "Server/Database/StoredProcedure",
+ new[] {new Tuple<string, string>("Server/Database", databaseNameCondition.Name),}
+ }
+ });
+
+ policy = policyStore.CreatePolicy(policyCondition.Name, AutomatedPolicyEvaluationMode.Enforce,
+ objectSet.Name, policyNamePrefix: "sproc_block_policy");
+ policy.Enabled = true;
+ policy.Alter();
+ var sproc = new StoredProcedure(db, "nameAllowed") { TextBody = "print 12" };
+ sproc.TextHeader = String.Format("CREATE PROCEDURE {0} AS", SmoObjectHelpers.SqlBracketQuoteString(sproc.Name));
+ Assert.DoesNotThrow(sproc.Create, "nameAllowed should be created");
+ var policyString = new System.Text.StringBuilder();
+ policyStore.CreatePolicyFromFacet(sproc, nameof(StoredProcedure), "policy", "condition", XmlWriter.Create(policyString));
+ Assert.That(policyString.ToString(), Is.Not.Empty, "policy xml");
+ var objectPolicy = policyStore.DeserializePolicy(XmlReader.Create(new System.IO.StringReader(policyString.ToString())), overwriteExistingPolicy: true, overwriteExistingCondition: true);
+ Assert.Multiple(() =>
+ {
+ Assert.That(objectPolicy.UsesFacet(nameof(StoredProcedure)), Is.True, "UsesFacet");
+ Assert.That(objectPolicy.Name, Is.EqualTo("policy"), "deserialized policy name");
+ Assert.That(objectPolicy.Condition, Is.EqualTo("condition"), "deserialized policy condition");
+ });
+ var policySet = policyStore.EnumApplicablePolicies(new SfcQueryExpression(sproc.Urn));
+ Assert.That(policySet.Rows.Cast<DataRow>().Select(r => r["Name"]), Is.EqualTo(new[] { policy.Name }), "EnumApplicablePolicies");
+ sproc = new StoredProcedure(db, "spBlock") { TextBody = "print 1" };
+ sproc.TextHeader = String.Format("CREATE PROCEDURE {0} AS", SmoObjectHelpers.SqlBracketQuoteString(sproc.Name));
+ var e = Assert.Throws<FailedOperationException>(sproc.Create, "spBlock should fail");
+ Exception innermostException = e;
+ while (innermostException.InnerException != null)
+ {
+ innermostException = innermostException.InnerException;
+ }
+ Assert.That(innermostException, Is.InstanceOf(typeof(SqlException)), "InnerException:{0}", innermostException);
+ var sqlException = (SqlException)innermostException;
+ Assert.That(sqlException.Number, Is.EqualTo(3609), "SqlException.Number");
+ Assert.That(policy.EvaluationHistories.Count, Is.EqualTo(2), "EvaluationHistories.Count");
+
+ }
+ finally
+ {
+ //Clean up test objects, note the order is important since the
+ //policy uses the object set and a condition and the object set
+ //uses the conditions
+ SmoObjectHelpers.SafeDrop(policy, objectSet, policyCondition, databaseNameCondition);
+ }
+
+ });
+ }
+
+ ///
+ /// Make sure we can create a Condition for each Facet.
+ /// We tolerate failures related to new facets that don't exist on older server versions.
+ ///
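+ /// SQL error 34014 (the "Facet doesn't exist" error asserted below) is tolerated so the test can
+ /// run against older server versions that lack newer facets; any other failure fails the test.
+ ///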
+ [TestMethod]
+ [SupportedServerVersionRange(DatabaseEngineType = DatabaseEngineType.Standalone, MinMajor = 12)]
+ public void Facets_are_all_assignable_to_a_condition()
+ {
+ ExecuteTest(() =>
+ {
+ Assert.Multiple(() =>
+ {
+ var ps = new PolicyStore
+ {
+ SqlStoreConnection = new SqlStoreConnection(ServerContext.ConnectionContext.SqlConnectionObject)
+ };
+
+ foreach (Type facet in FacetRepository.RegisteredFacets)
+ {
+ var drop = false;
+ var f = facet; // avoid loop variable capture
+ var pi = FacetRepository.GetFacetProperties(f)[0];
+ var value = GenerateValue(pi.PropertyType);
+ Trace.TraceInformation("Property: '{0}'; type: '{1}'; value: '{2}'", pi.Name, pi.PropertyType, value);
+ ExpressionNode node = new ExpressionNodeOperator(OperatorType.EQ,
+ new ExpressionNodeAttribute(pi.Name, f),
+ ExpressionNodeConstant.ConstructNode(value));
+ var name = "con" + pi.Name + Guid.NewGuid();
+ var c = new Condition(ps, name)
+ {
+ ExpressionNode = node,
+ Facet = f.Name
+ };
+ try
+ {
+ c.Create();
+ drop = true;
+ Assert.That(ps.Conditions.Select(cond => cond.Name), Has.Member(c.Name),
+ "PolicyStore.Conditions should have new Condition after Create");
+ }
+ catch (SfcCRUDOperationFailedException ef)
+ {
+ Trace.TraceInformation("Facet: {0} Server:{1} Error: {2}", f.Name, ps.Name, ef);
+ Assert.That(ef.InnerException, Is.InstanceOf(typeof(ExecutionFailureException)), "ef.InnerException");
+
+ if (ef.InnerException is ExecutionFailureException ex)
+ {
+ Assert.That(ex.InnerException, Is.InstanceOf(typeof(SqlException)), "Only SqlException should be thrown");
+ if (ex.InnerException is SqlException sqlEx)
+ {
+ Assert.That(sqlEx.Number, Is.EqualTo(34014), "Only Facet doesn't exist should be thrown");
+ }
+ }
+ }
+ finally
+ {
+ if (drop)
+ {
+ c.Drop();
+ }
+ }
+ }
+ });
+ });
+ }
+
+ ///
+ /// Create a test value of the given type
+ ///
+ /// The type to create a value for
+ /// A representative constant of the given type, or null if the type is not handled
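+ /// Usage sketch (values taken from the branches below): GenerateValue(typeof(bool)) returns true,
+ /// GenerateValue(typeof(int)) returns 1, and GenerateValue(typeof(string[])) returns { "str1", "str2", "str3" }.
+ /// Facets_are_all_assignable_to_a_condition feeds these values to ExpressionNodeConstant.ConstructNode
+ /// to build a trivial EQ condition for each registered facet.
+ ///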
+ public static object GenerateValue(Type type)
+ {
+ object value = null;
+
+ if (type.IsEnum)
+ {
+ value = 1;
+ }
+ else if (type == typeof(Boolean))
+ {
+ value = true;
+ }
+ else if (type == typeof(int))
+ {
+ value = 1;
+ }
+ else if (type == typeof(byte))
+ {
+ value = (byte)2;
+ }
+ else if (type == typeof(short))
+ {
+ value = (short)3;
+ }
+ else if (type == typeof(long))
+ {
+ value = (long)4;
+ }
+ else if (type == typeof(double))
+ {
+ value = (double)5.0;
+ }
+ else if (type == typeof(float))
+ {
+ value = (float)6.0;
+ }
+ else if (type == typeof(decimal))
+ {
+ value = (decimal)7.0;
+ }
+ else if (type == typeof(string))
+ {
+ value = "string";
+ }
+ else if (type == typeof(char))
+ {
+ value = 'c';
+ }
+ else if (type == typeof(DateTime))
+ {
+ value = DateTime.Parse("11/6/2007 12:04");
+ }
+ else if (type == typeof(System.Guid))
+ {
+ value = System.Guid.Empty;
+ }
+ else if (type == typeof(int[]))
+ {
+ value = new int[] { 1, 2, 3 };
+ }
+ else if (type == typeof(Byte[]))
+ {
+ value = new Byte[] { 4, 5, 6 };
+ }
+ else if (type == typeof(short[]))
+ {
+ value = new short[] { 7, 8, 9 };
+ }
+ else if (type == typeof(long[]))
+ {
+ value = new long[] { 10, 11, 12 };
+ }
+ else if (type == typeof(double[]))
+ {
+ value = new double[] { 13.0, 14.0, 15.0 };
+ }
+ else if (type == typeof(float[]))
+ {
+ value = new float[] { 16.0f, 17.0f, 18.0f };
+ }
+ else if (type == typeof(decimal[]))
+ {
+ value = new decimal[] { 19.0m, 20.0m, 21.0m };
+ }
+ else if (type == typeof(string[]))
+ {
+ value = new string[] { "str1", "str2", "str3" };
+ }
+ else if (type == typeof(char[]))
+ {
+ value = new char[] { 'a', 'b', 'c' };
+ }
+
+ return value;
+ }
+
+ }
+}
diff --git a/src/FunctionalTest/Smo/DMF/SfcXmlHelper.cs b/src/FunctionalTest/Smo/DMF/SfcXmlHelper.cs
new file mode 100644
index 00000000..a9df70dd
--- /dev/null
+++ b/src/FunctionalTest/Smo/DMF/SfcXmlHelper.cs
@@ -0,0 +1,40 @@
+// Copyright (c) Microsoft.
+// Licensed under the MIT license.
+
+using System.Xml;
+
+namespace Microsoft.SqlServer.Test.SMO.DMF
+{
+ ///
+ /// Helper methods for SFC XML functionality
+ ///
+ public static class SfcXmlHelper
+ {
+ // Uris for schema namespaces in the DMF xml
+ private static string DmfUri = "http://schemas.microsoft.com/sqlserver/DMF/2007/08";
+ private static string SfcUri = "http://schemas.microsoft.com/sqlserver/sfc/serialization/2007/08";
+ private static string SmlUri = "http://schemas.serviceml.org/sml/2007/02";
+ private static string SmlifUri = "http://schemas.serviceml.org/smlif/2007/02";
+ private static string XsUri = "http://www.w3.org/2001/XMLSchema";
+
+ ///
+ /// The namespace manager is required for the XPathNavigator to find XML elements that are in a namespace.
+ ///
+ /// The document whose NameTable backs the namespace manager
+ /// An XmlNamespaceManager with the DMF, SFC, SML, SMLIF, and XS prefixes registered
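+ /// Illustrative usage (the "dmf" prefix is one of the prefixes registered below):
+ ///   var nsManager = SfcXmlHelper.GetXmlNsManager(document);
+ ///   var node = document.SelectSingleNode("//dmf:EvaluationHistory/dmf:Result", nsManager);
+ ///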
+ public static XmlNamespaceManager GetXmlNsManager(XmlDocument xmlDoc)
+ {
+ // The namespace prefix defined here does not have to be the same as the prefix that is actually used in the file
+ // For smlif we will use 's' because it makes the Xpath queries shorter and easier to read
+ XmlNamespaceManager xmlnsManager = new XmlNamespaceManager(xmlDoc.NameTable);
+ xmlnsManager.AddNamespace("dmf", SfcXmlHelper.DmfUri);
+ xmlnsManager.AddNamespace("DMF", SfcXmlHelper.DmfUri);
+ xmlnsManager.AddNamespace("sfc", SfcXmlHelper.SfcUri);
+ xmlnsManager.AddNamespace("sml", SfcXmlHelper.SmlUri);
+ xmlnsManager.AddNamespace("s", SfcXmlHelper.SmlifUri);
+ xmlnsManager.AddNamespace("xs", SfcXmlHelper.XsUri);
+
+ return xmlnsManager;
+ }
+ }
+}
diff --git a/src/FunctionalTest/Smo/GeneralFunctionality/AuditSmoTests.cs b/src/FunctionalTest/Smo/GeneralFunctionality/AuditSmoTests.cs
new file mode 100644
index 00000000..214a6f99
--- /dev/null
+++ b/src/FunctionalTest/Smo/GeneralFunctionality/AuditSmoTests.cs
@@ -0,0 +1,451 @@
+// Copyright (c) Microsoft.
+// Licensed under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.Data;
+using System.Linq;
+using System.Reflection;
+using Microsoft.SqlServer.Management.Common;
+using Microsoft.SqlServer.Management.Smo;
+using Microsoft.SqlServer.Test.Manageability.Utils;
+using Microsoft.SqlServer.Test.Manageability.Utils.TestFramework;
+using Microsoft.VisualStudio.TestTools.UnitTesting;
+using NUnit.Framework;
+using _SMO = Microsoft.SqlServer.Management.Smo;
+using Assert = NUnit.Framework.Assert;
+
+namespace Microsoft.SqlServer.Test.SMO.GeneralFunctionality
+{
+ ///
+ ///
+ ///
+ [TestClass]
+ public class AuditSmoTests : SqlTestBase
+ {
+ ///
+ /// A server audit with the SECURITY_LOG destination must be successfully created.
+ /// An empty or Unknown destination must throw an error when creating the server audit.
+ ///
+ [TestMethod]
+ [UnsupportedDatabaseEngineEdition(DatabaseEngineEdition.SqlManagedInstance)]
+ [SupportedServerVersionRange(DatabaseEngineType = DatabaseEngineType.Standalone, MinMajor = 15, MaxMajor = 15)]
+ public void ServerAudit_VerifySecurityLogAndInvalidDestinations()
+ {
+ var auditNameSecurityLog = SmoObjectHelpers.GenerateUniqueObjectName("SmoAuditTestSecurityLog");
+ var auditNameInvalid = SmoObjectHelpers.GenerateUniqueObjectName("SmoAuditTestInvalid");
+
+ ExecuteTest(() =>
+ {
+ var server = new _SMO.Server(this.ServerContext.ConnectionContext);
+
+ Audit serverAudit = new Audit(server, auditNameSecurityLog)
+ {
+ DestinationType = AuditDestinationType.SecurityLog
+ };
+
+ serverAudit.Create();
+ serverAudit.Drop();
+
+ var exception = Assert.Throws<FailedOperationException>(() =>
+ {
+ Audit serverAuditInvalid = new Audit(server, auditNameInvalid);
+
+ serverAuditInvalid.Create();
+ });
+
+ Assert.IsInstanceOf<_SMO.PropertyNotSetException>(exception.InnerException, "Unexpected inner exception");
+ Assert.That(exception.InnerException.Message, Is.EqualTo(ExceptionTemplates.PropertyNotSetExceptionText("DestinationType")), "Unexpected exception message");
+
+ exception = Assert.Throws<FailedOperationException>(() =>
+ {
+ Audit serverAuditInvalid = new Audit(server, auditNameInvalid)
+ {
+ DestinationType = AuditDestinationType.Unknown
+ };
+
+ serverAuditInvalid.Create();
+ });
+
+ Assert.IsInstanceOf(exception.InnerException, "Unexpected inner exception");
+ Assert.That(exception.InnerException.Message, Is.EqualTo(ExceptionTemplates.UnknownEnumeration("DestinationType")), "Unexpected exception message");
+ });
+ }
+
+ ///
+ /// The Audit object is not supported in SQL Azure; this test verifies that the Audit SMO object is not available in Azure.
+ ///
+ [TestMethod]
+ [SupportedServerVersionRange(DatabaseEngineType = DatabaseEngineType.SqlAzureDatabase)]
+ public void ServerAudit_VerifyAuditObjectIsNotSupportedInAzure()
+ {
+ var auditName = SmoObjectHelpers.GenerateUniqueObjectName("SmoAuditTest");
+
+ ExecuteTest(() =>
+ {
+ var server = new _SMO.Server(this.ServerContext.ConnectionContext);
+
+ var exception = Assert.Throws(() =>
+ {
+ Audit serverAudit = new Audit(server, auditName);
+ });
+ Assert.IsInstanceOf(exception.InnerException, "Unexpected inner exception");
+ Assert.IsInstanceOf(exception.InnerException.InnerException, "Unexpected inner exception");
+
+ string errorMsg = server.DatabaseEngineEdition == DatabaseEngineEdition.SqlOnDemand ? ExceptionTemplates.NotSupportedOnOnDemandWithDetails(typeof(Audit).Name) :
+ ExceptionTemplates.NotSupportedOnCloudWithDetails(typeof(Audit).Name);
+
+ Assert.That(exception.InnerException.InnerException.Message, Is.EqualTo(errorMsg), "Unexpected exception message");
+ });
+ }
+
+ ///
+ /// An Audit object with a URL target is only available on managed instance servers; this test verifies that the URL target
+ /// is not supported on non-managed-instance servers.
+ ///
+ [TestMethod]
+ [UnsupportedDatabaseEngineEdition(DatabaseEngineEdition.SqlManagedInstance)]
+ public void ServerAudit_TestUrlTargetOnlyAvailableInManagedInstance()
+ {
+ const string auditName = "dummyAuditName";
+ const string blobPath = "https://dummystorage.blob.core.windows.net/sqldbauditlog/";
+
+ ExecuteTest(() =>
+ {
+ var server = new _SMO.Server(this.ServerContext.ConnectionContext);
+
+ var exception = Assert.Throws(() =>
+ {
+ Audit serverAudit = new Audit(server, auditName)
+ {
+ DestinationType = AuditDestinationType.Url,
+ FilePath = blobPath
+ };
+
+ serverAudit.Create();
+ });
+
+ if (server.DatabaseEngineEdition == DatabaseEngineEdition.SqlOnDemand || server.DatabaseEngineType == DatabaseEngineType.SqlAzureDatabase)
+ {
+ Assert.IsInstanceOf(exception.InnerException, "Unexpected inner exception for SqlOnDemand");
+
+ Exception sqlOnDemandError = exception.InnerException.InnerException;
+ Assert.IsInstanceOf(sqlOnDemandError, "Unexpected error for SqlOnDemand");
+
+ string errorMsg = server.DatabaseEngineEdition == DatabaseEngineEdition.SqlOnDemand ? ExceptionTemplates.NotSupportedOnOnDemandWithDetails(typeof(Audit).Name) :
+ ExceptionTemplates.NotSupportedOnCloudWithDetails(typeof(Audit).Name);
+
+ Assert.That(sqlOnDemandError.Message, Is.EqualTo(errorMsg), "Unexpected exception message");
+ }
+ else
+ {
+ Assert.IsInstanceOf(exception.InnerException, "Unexpected inner exception");
+ Assert.That(exception.InnerException.Message,
+ Is.EqualTo(ExceptionTemplates.InvalidPropertyValueForVersion(typeof(Audit).Name,
+ "DestinationType", "Url", server.GetSqlServerVersionName())),
+ "Unexpected exception message");
+ }
+ });
+ }
+
+ ///
+ /// An Audit object with an EXTERNAL_MONITOR target is only available on managed instance servers; this test verifies that the
+ /// EXTERNAL_MONITOR target is not supported on non-managed-instance servers.
+ ///
+ [TestMethod]
+ [UnsupportedDatabaseEngineEdition(DatabaseEngineEdition.SqlManagedInstance)]
+ public void ServerAudit_TestExternalMonitorTargetOnlyAvailableInManagedInstance()
+ {
+ const string auditName = "dummyAuditName";
+
+ ExecuteTest(() =>
+ {
+ var server = new _SMO.Server(this.ServerContext.ConnectionContext);
+
+ var exception = Assert.Throws(() =>
+ {
+ Audit serverAudit = new Audit(server, auditName)
+ {
+ DestinationType = AuditDestinationType.ExternalMonitor
+ };
+
+ serverAudit.Create();
+ });
+
+ if (server.DatabaseEngineEdition == DatabaseEngineEdition.SqlOnDemand || server.DatabaseEngineType == DatabaseEngineType.SqlAzureDatabase)
+ {
+ Assert.IsInstanceOf(exception.InnerException, "Unexpected inner exception for SqlOnDemand");
+
+ Exception sqlOnDemandError = exception.InnerException.InnerException;
+ Assert.IsInstanceOf(sqlOnDemandError, "Unexpected error for SqlOnDemand");
+
+ string errorMsg = server.DatabaseEngineEdition == DatabaseEngineEdition.SqlOnDemand ? ExceptionTemplates.NotSupportedOnOnDemandWithDetails(typeof(Audit).Name) :
+ ExceptionTemplates.NotSupportedOnCloudWithDetails(typeof(Audit).Name);
+
+ Assert.That(sqlOnDemandError.Message, Is.EqualTo(errorMsg), "Unexpected exception message");
+ }
+ else
+ {
+ Assert.IsInstanceOf(exception.InnerException, "Unexpected inner exception");
+ Assert.That(exception.InnerException.Message,
+ Is.EqualTo(ExceptionTemplates.InvalidPropertyValueForVersion(typeof(Audit).Name,
+ "DestinationType", "ExternalMonitor", server.GetSqlServerVersionName())),
+ "Unexpected exception message");
+ }
+ });
+ }
+
+ ///
+ /// Verifies that an audit object with the EXTERNAL_MONITOR destination type can be successfully created
+ ///
+ [TestMethod]
+ [SupportedServerVersionRange(DatabaseEngineType = DatabaseEngineType.Standalone, Edition = DatabaseEngineEdition.SqlManagedInstance)]
+ public void ServerAudit_TestExternalMonitorTargetForManagedInstance()
+ {
+ var auditName1 = SmoObjectHelpers.GenerateUniqueObjectName("SmoAuditTest");
+ var auditName2 = SmoObjectHelpers.GenerateUniqueObjectName("SmoAuditTest");
+ var auditName3 = SmoObjectHelpers.GenerateUniqueObjectName("SmoAuditTest");
+
+ ExecuteTest(() =>
+ {
+ var server = new _SMO.Server(this.ServerContext.ConnectionContext);
+
+ Audit serverAudit1 = new Audit(server, auditName1)
+ {
+ DestinationType = AuditDestinationType.ExternalMonitor,
+ OnFailure = OnFailureAction.Continue,
+ IsOperator = true
+ };
+
+ Audit serverAudit2 = new Audit(server, auditName2)
+ {
+ DestinationType = AuditDestinationType.ExternalMonitor,
+ IsOperator = false
+ };
+
+ Audit serverAudit3 = new Audit(server, auditName3)
+ {
+ DestinationType = AuditDestinationType.ExternalMonitor
+ };
+
+ try
+ {
+ serverAudit1.Create();
+ serverAudit2.Create();
+ serverAudit3.Create();
+
+ serverAudit1.Refresh();
+ serverAudit2.Refresh();
+ serverAudit3.Refresh();
+
+ Assert.Multiple(() =>
+ {
+ Assert.That(serverAudit1.DestinationType, Is.EqualTo(AuditDestinationType.ExternalMonitor), $"Destinationtype of {serverAudit1.Name}");
+ Assert.That(serverAudit1.OnFailure, Is.EqualTo(OnFailureAction.Continue), $"OnFailure {serverAudit1.Name}");
+ Assert.That(serverAudit1.IsOperator, Is.True, $"IsOperator {serverAudit1.Name}");
+ Assert.That(server.Audits, Has.Member(serverAudit1), $"Server's audit collection should contain created audit {serverAudit1.Name}");
+ });
+
+ Assert.Multiple(() =>
+ {
+ Assert.That(serverAudit2.DestinationType, Is.EqualTo(AuditDestinationType.ExternalMonitor), $"Destinationtype {serverAudit2.Name}");
+ Assert.That(serverAudit2.IsOperator, Is.False, $"IsOperator {serverAudit2.Name}");
+ Assert.That(server.Audits, Has.Member(serverAudit2), $"Server's audit collection should contain created audit {serverAudit2.Name}");
+ });
+
+ Assert.Multiple(() =>
+ {
+ Assert.That(serverAudit3.DestinationType, Is.EqualTo(AuditDestinationType.ExternalMonitor), $"Destinationtype {serverAudit3.Name}");
+ Assert.That(serverAudit3.IsOperator, Is.False, $"IsOperator {serverAudit3.Name}");
+ Assert.That(server.Audits, Has.Member(serverAudit3), $"Server's audit collection should contain created audit {serverAudit3.Name}");
+ });
+
+ Assert.Multiple(() =>
+ {
+ serverAudit3.IsOperator = true;
+
+ serverAudit3.Alter();
+ serverAudit3.Refresh();
+
+ Assert.That(serverAudit3.IsOperator, Is.True, $"Alter IsOperator to true {serverAudit3.Name}");
+
+ serverAudit3.IsOperator = false;
+
+ serverAudit3.Alter();
+ serverAudit3.Refresh();
+
+ Assert.That(serverAudit3.IsOperator, Is.False, $"Alter IsOperator to false {serverAudit3.Name}");
+ });
+ }
+ finally
+ {
+ while (server.Audits.Count > 0)
+ {
+ server.Audits[0].DropIfExists();
+ }
+ }
+ });
+ }
+
+ ///
+ /// Batch auditing is only supported on SQL v150 and managed instance
+ ///
+ [TestMethod]
+ [SupportedServerVersionRange(DatabaseEngineType = DatabaseEngineType.Standalone, MinMajor = 11, MaxMajor = 14)]
+ public void ServerAuditspecifications_ExceptionIsThrownForUnspportedAuditGroup()
+ {
+ var auditName = SmoObjectHelpers.GenerateUniqueObjectName("SmoAuditTest");
+
+ ExecuteTest(() =>
+ {
+
+ var server = new _SMO.Server(this.ServerContext.ConnectionContext);
+
+ var serverAudit = new Audit(server, auditName)
+ {
+ DestinationType = AuditDestinationType.ApplicationLog
+ };
+ serverAudit.Create();
+
+ ServerAuditSpecification serverAuditSpec = new ServerAuditSpecification(server, "Test Audit Specification");
+ try
+ {
+ serverAuditSpec.AuditName = auditName;
+ serverAuditSpec.AddAuditSpecificationDetail(new AuditSpecificationDetail(AuditActionType.BatchStartedGroup));
+ serverAuditSpec.AddAuditSpecificationDetail(new AuditSpecificationDetail(AuditActionType.BatchCompletedGroup));
+
+
+ var exception = Assert.Throws<FailedOperationException>(() =>
+ {
+ serverAuditSpec.Create();
+ });
+ }
+ finally
+ {
+ serverAudit.Drop();
+ }
+ });
+ }
+
+ ///
+ /// Batch auditing is only supported on SQL v150 and managed instance
+ ///
+ [TestMethod]
+ //[SupportedTargetServerFriendlyName("Sqlv150")]
+ [SupportedServerVersionRange(DatabaseEngineType = DatabaseEngineType.Standalone, MinMajor = 15)]
+ [UnsupportedDatabaseEngineEdition(DatabaseEngineEdition.SqlManagedInstance)]
+ public void ServerAuditspecifications_VerifySupportForBatchAuditingActionType()
+ {
+ var auditName = SmoObjectHelpers.GenerateUniqueObjectName("SmoAuditTest");
+ var auditSpecificationName = SmoObjectHelpers.GenerateUniqueObjectName("SmoAuditSpecificationTest");
+
+ ExecuteTest(() =>
+ {
+ var server = new _SMO.Server(this.ServerContext.ConnectionContext);
+
+ var serverAudit = new Audit(server, auditName)
+ {
+ DestinationType = AuditDestinationType.ApplicationLog
+ };
+ serverAudit.Create();
+
+ ServerAuditSpecification serverAuditSpec = new ServerAuditSpecification(server, auditSpecificationName);
+ try
+ {
+ serverAuditSpec.AuditName = auditName;
+ serverAuditSpec.AddAuditSpecificationDetail(new AuditSpecificationDetail(AuditActionType.BatchStartedGroup));
+ serverAuditSpec.AddAuditSpecificationDetail(new AuditSpecificationDetail(AuditActionType.BatchCompletedGroup));
+
+ Assert.DoesNotThrow(() =>
+ {
+ serverAuditSpec.Create();
+ serverAuditSpec.Drop();
+ }, "Batch Auditing should be supported on SQL 2019");
+ }
+ finally
+ {
+ try { serverAudit.Drop(); } catch { }
+ }
+ });
+ }
+
+ ///
+ /// SERVER_PERMISSION_CHANGE_GROUP auditing should work in SMO
+ ///
+ [TestMethod]
+ [SupportedServerVersionRange(DatabaseEngineType = DatabaseEngineType.Standalone)]
+ [UnsupportedDatabaseEngineEdition(DatabaseEngineEdition.SqlManagedInstance)]
+ public void ServerAuditspecifications_I_Should_Be_Able_To_Create_And_Script_A_ServerAuditSpecification_For_SERVER_PERMISSION_CHANGE_GROUP()
+ {
+ var auditName = SmoObjectHelpers.GenerateUniqueObjectName("SmoAuditTest");
+ var auditSpecificationName = SmoObjectHelpers.GenerateUniqueObjectName("SmoAuditSpecificationTest");
+
+ ExecuteTest(() =>
+ {
+ var server = new _SMO.Server(this.ServerContext.ConnectionContext);
+
+ var serverAudit = new Audit(server, auditName) { DestinationType = AuditDestinationType.ApplicationLog };
+
+ var serverAuditSpec = new ServerAuditSpecification(server, auditSpecificationName) { AuditName = auditName };
+
+ try
+ {
+ serverAudit.Create();
+
+ serverAuditSpec.AddAuditSpecificationDetail(new AuditSpecificationDetail(AuditActionType.ServerPermissionChangeGroup));
+
+ Assert.DoesNotThrow(() =>
+ {
+ // If this one throws, then something regressed...
+ serverAuditSpec.Create();
+
+ }, "Unexpected exception while creating audit specification 'SERVER_PERMISSION_CHANGE_GROUP'");
+
+ Assert.DoesNotThrow(() =>
+ {
+ // Likewise, this should not throw, and the one and only string in the script should contain this T-SQL fragment
+ Assert.That(serverAuditSpec.Script()[0], Contains.Substring("ADD (SERVER_PERMISSION_CHANGE_GROUP)"));
+ }, "Unexpected exception while scripting audit specification 'SERVER_PERMISSION_CHANGE_GROUP'");
+
+ }
+ finally
+ {
+ // It is benign to fail to clean-up
+ try { serverAuditSpec.Drop(); } catch { }
+ try { serverAudit.Drop(); } catch { }
+ }
+ });
+ }
+
+ ///
+ /// The AuditActionType enum should contain all the values present in the database
+ ///
+ [TestMethod]
+ [UnsupportedHostPlatform(SqlHostPlatforms.Linux)]
+ [UnsupportedDatabaseEngineEdition(DatabaseEngineEdition.SqlManagedInstance)]
+ [SupportedServerVersionRange(DatabaseEngineType = DatabaseEngineType.Standalone, MinMajor = 15)]
+ public void ServerAuditspecifications_Audit_Action_Types_In_DB_Should_Match_Enum_Values()
+ {
+ ExecuteTest(() =>
+ {
+ var enumAttributeNames = GetTsqlSyntaxStringAttributeNames();
+ var query = @"Select DISTINCT name FROM master.sys.dm_audit_actions WHERE name like '%GROUP%'
+ UNION ALL SELECT name FROM (VALUES ('SELECT'),('UPDATE'),('INSERT'),('DELETE'),('EXECUTE'),('RECEIVE'),('REFERENCES')) actions(name)";
+
+ var dbAuditList = ServerContext.ConnectionContext.ExecuteWithResults(query).Tables[0].Rows.Cast<DataRow>().Select(row => row["name"].ToString());
+
+ // enumAttributeNames is a superset of dbAuditList.
+ Assert.That(enumAttributeNames, Is.SupersetOf(dbAuditList), @"Some types are missing in AuditActionType enum.
+ Please, update enum AuditActionType in /src/Microsoft/SqlServer/Management/SqlEnum/enumstructs.cs");
+ });
+ }
+
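+ ///
+ /// Extracts the TsqlSyntaxString display names from the AuditActionType enum metadata,
+ /// e.g. "SERVER_PERMISSION_CHANGE_GROUP" for AuditActionType.ServerPermissionChangeGroup,
+ /// which is the form returned by sys.dm_audit_actions and compared in the test above.
+ ///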
+ private static IEnumerable<string> GetTsqlSyntaxStringAttributeNames()
+ {
+ var enumType = typeof(AuditActionType);
+ return from name in Enum.GetNames(enumType)
+ let valueAttributes = enumType.GetMember(name).First(m => m.DeclaringType == enumType).GetCustomAttributes(typeof(TsqlSyntaxStringAttribute), false)
+ select ((TsqlSyntaxStringAttribute)valueAttributes[0]).DisplayName;
+ }
+ }
+}
diff --git a/src/FunctionalTest/Smo/GeneralFunctionality/DataClassificationSmoTests.cs b/src/FunctionalTest/Smo/GeneralFunctionality/DataClassificationSmoTests.cs
new file mode 100644
index 00000000..a2c4bef3
--- /dev/null
+++ b/src/FunctionalTest/Smo/GeneralFunctionality/DataClassificationSmoTests.cs
@@ -0,0 +1,557 @@
+// Copyright (c) Microsoft.
+// Licensed under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using Microsoft.SqlServer.Management.Common;
+using Microsoft.SqlServer.Management.Smo;
+using Microsoft.SqlServer.Test.Manageability.Utils.TestFramework;
+using Microsoft.SqlServer.Test.SMO.ScriptingTests;
+using Microsoft.VisualStudio.TestTools.UnitTesting;
+using NUnit.Framework;
+using _SMO = Microsoft.SqlServer.Management.Smo;
+using Assert = NUnit.Framework.Assert;
+
+namespace Microsoft.SqlServer.Test.SMO.GeneralFunctionality
+{
+ ///
+ ///
+ ///
+ [TestClass]
+ [UnsupportedDatabaseEngineEdition(DatabaseEngineEdition.SqlDataWarehouse, DatabaseEngineEdition.SqlOnDemand)]
+ public class DataClassificationSmoTests : SmoObjectTestBase
+ {
+ ///
+ /// Verifies that a table with classified columns is properly created
+ ///
+ [TestMethod]
+ public void DataClassification_TableCreate()
+ {
+ ExecuteWithClean(db =>
+ {
+ var table = new _SMO.Table(db, GenerateUniqueSmoObjectName ("ClassifiedTable"));
+ var column1 = new _SMO.Column(table, "a", _SMO.DataType.Int) { SensitivityLabelName = "'LabelName_A'", SensitivityLabelId = "'LabelId_A'", SensitivityInformationTypeName = "'InfoTypeName_A'", SensitivityInformationTypeId = "'InfoTypeId_A'" };
+ var column2 = new _SMO.Column(table, "b", _SMO.DataType.Int);
+ var column3 = new _SMO.Column(table, "c", _SMO.DataType.Int) { SensitivityLabelName = "LabelName_C", SensitivityInformationTypeName = "InfoTypeName_C" };
+ var column4 = new _SMO.Column(table, "d", _SMO.DataType.Int);
+
+ var rank1 = SetSensitivityRank(column1, SensitivityRank.Low);
+ var rank3 = SetSensitivityRank(column3, SensitivityRank.High);
+
+ table.Columns.Add(column1);
+ table.Columns.Add(column2);
+
+ table.Create();
+
+ column3.Create();
+ column4.Create();
+
+ Assert.That(table.HasClassifiedColumn, Is.True, "'table' HasClassifiedColumn");
+ Assert.That(column1.IsClassified, Is.True, "'a' IsClassified");
+ Assert.That(column1.SensitivityLabelName, Is.EqualTo("'LabelName_A'"), "'a' SensitivityLabelName");
+ Assert.That(column1.SensitivityLabelId, Is.EqualTo("'LabelId_A'"), "'a' SensitivityLabelId");
+ Assert.That(column1.SensitivityInformationTypeName, Is.EqualTo("'InfoTypeName_A'"), "'a' SensitivityInformationTypeName");
+ Assert.That(column1.SensitivityInformationTypeId, Is.EqualTo("'InfoTypeId_A'"), "'a' SensitivityInformationTypeId");
+ Assert.That(GetSensitivityRank(column1), Is.EqualTo(rank1), "'a' SensitivityRank");
+ Assert.That(column2.IsClassified, Is.False, "'b' IsClassified");
+ Assert.That(GetSensitivityRank(column2), Is.EqualTo(SensitivityRank.Undefined), "'b' SensitivityRank");
+ Assert.That(column3.IsClassified, Is.True, "'c' IsClassified");
+ Assert.That(column4.IsClassified, Is.False, "'d' IsClassified");
+ Assert.That(column2.SensitivityLabelId, Is.Empty, "'b' SensitivityLabelId");
+ Assert.That(column3.SensitivityLabelId, Is.Empty, "'c' SensitivityLabelId");
+ Assert.That(GetSensitivityRank(column3), Is.EqualTo(rank3), "'c' SensitivityRank");
+ Assert.That(column4.SensitivityInformationTypeId, Is.Empty, "'d' SensitivityInformationTypeId");
+ Assert.That(GetSensitivityRank(column4), Is.EqualTo(SensitivityRank.Undefined), "'d' SensitivityRank");
+ });
+ }
+
+ ///
+ /// Verifies that a table with classified columns is properly altered
+ ///
+ [TestMethod]
+ public void DataClassification_TableAlter()
+ {
+ ExecuteWithClean(db =>
+ {
+ var table = new _SMO.Table(db, GenerateUniqueSmoObjectName("ClassifiedTable"));
+
+ table.Columns.Add(new _SMO.Column(table, "a", _SMO.DataType.Int) { SensitivityLabelName = "LabelName_A", SensitivityLabelId = "LabelId_A", SensitivityInformationTypeName = "InfoTypeName_A", SensitivityInformationTypeId = "InfoTypeId_A" });
+ table.Columns.Add(new _SMO.Column(table, "b", _SMO.DataType.Int));
+
+ SetSensitivityRank(table.Columns[0], SensitivityRank.Low);
+
+ table.Create();
+
+ var column1 = table.Columns["a"];
+ var column2 = table.Columns["b"];
+
+ column1.SensitivityLabelName = "'LabelName_A_Changed'";
+ column1.SensitivityLabelId = string.Empty;
+ column1.SensitivityInformationTypeName = "InfoTypeName_A_Changed";
+ column1.SensitivityInformationTypeId = string.Empty;
+ var rank1 = SetSensitivityRank(column1, SensitivityRank.Medium);
+
+ column1.Alter();
+
+ column2.SensitivityInformationTypeName = "InfoTypeName_B_Changed";
+
+ table.Alter();
+ column2.Refresh();
+
+ Assert.That(table.HasClassifiedColumn, Is.True, "'table' HasClassifiedColumn");
+ Assert.That(column1.IsClassified, Is.True, "'a' IsClassified");
+ Assert.That(column2.IsClassified, Is.True, "'b' IsClassified");
+ Assert.That(column1.SensitivityLabelName, Is.EqualTo("'LabelName_A_Changed'"), "'a' SensitivityLabelName");
+ Assert.That(column1.SensitivityLabelId, Is.Empty, "'a' SensitivityLabelId");
+ Assert.That(column1.SensitivityInformationTypeName, Is.EqualTo("InfoTypeName_A_Changed"), "'a' SensitivityInformationTypeName");
+ Assert.That(column1.SensitivityInformationTypeId, Is.Empty, "'a' SensitivityInformationTypeId");
+ Assert.That(GetSensitivityRank(column1), Is.EqualTo(rank1), "'a' SensitivityRank");
+ Assert.That(column2.SensitivityInformationTypeName, Is.EqualTo("InfoTypeName_B_Changed"), "'b' SensitivityInformationTypeName");
+ });
+ }
+
+ ///
+ /// Verifies data classification is properly dropped
+ ///
+ [TestMethod]
+ public void DataClassification_Drop()
+ {
+ ExecuteWithClean(db =>
+ {
+ var table = new _SMO.Table(db, GenerateUniqueSmoObjectName("ClassifiedTable"));
+
+ table.Columns.Add(new _SMO.Column(table, "a", _SMO.DataType.Int) { SensitivityLabelName = "LabelName_A", SensitivityLabelId = "LabelId_A", SensitivityInformationTypeName = "InfoTypeName_A", SensitivityInformationTypeId = "InfoTypeId_A" });
+ table.Columns.Add(new _SMO.Column(table, "b", _SMO.DataType.Int));
+
+ SetSensitivityRank(table.Columns[0], SensitivityRank.High);
+
+ table.Create();
+
+ var column1 = table.Columns["a"];
+ var column2 = table.Columns["b"];
+
+ column1.RemoveClassification();
+ column1.Alter();
+ column1.Refresh();
+
+ // Verify the fix for a bug where setting a sensitivity option that was previously empty to an empty string
+ // caused the drop to fail on server versions [10, 14]. TFS bug: 586347
+ column2.SensitivityLabelName = string.Empty;
+ column2.Alter();
+ column2.Refresh();
+
+ Assert.That(column1.IsClassified, Is.False, "'a' IsClassified");
+ Assert.That(column1.SensitivityLabelName, Is.Empty, "'a' SensitivityLabelName");
+ Assert.That(column1.SensitivityLabelId, Is.Empty, "'a' SensitivityLabelId");
+ Assert.That(column1.SensitivityInformationTypeName, Is.Empty, "'a' SensitivityInformationTypeName");
+ Assert.That(column1.SensitivityInformationTypeId, Is.Empty, "'a' SensitivityInformationTypeId");
+ Assert.That(GetSensitivityRank(column1), Is.EqualTo(SensitivityRank.Undefined), "'a' SensitivityRank");
+ });
+ }
+
+ ///
+ /// Verifies that a column can be created with a classification label id
+ /// when the classification label name is empty.
+ /// Note: TASK 429910 Uncomment Azure and remove the platform limitation once T45 is deployed on Azure and Linux
+ ///
+ [TestMethod]
+ public void DataClassification_CreateColumnWithEmptyLabelName()
+ {
+ ExecuteWithClean(db =>
+ {
+ var table = new _SMO.Table(db, GenerateUniqueSmoObjectName("ClassifiedTable"));
+ var column = new _SMO.Column(table, "a", _SMO.DataType.Int) { SensitivityLabelId = "LabelId_A" };
+
+ table.Columns.Add(column);
+
+ table.Create();
+ table.Refresh();
+
+ Assert.That(column.IsClassified, Is.True, "'a' IsClassified");
+ Assert.That(column.SensitivityLabelId, Is.EqualTo("LabelId_A"), "'a' SensitivityLabelId");
+ });
+ }
+
+ ///
+ /// Verifies that a column can be created with a classification information type id
+ /// when the classification information type name is empty.
+ /// Note: TASK 429910 Uncomment Azure and remove the platform limitation once T45 is deployed on Azure and Linux
+ ///
+ [TestMethod]
+ public void DataClassification_CreateColumnWithEmptyInformationTypeName()
+ {
+ ExecuteWithClean(db =>
+ {
+ var table = new _SMO.Table(db, GenerateUniqueSmoObjectName("ClassifiedTable"));
+
+ table.Columns.Add(new _SMO.Column(table, "a", _SMO.DataType.Int));
+ table.Create();
+
+ var column = new _SMO.Column(table, "b", _SMO.DataType.Int) { SensitivityInformationTypeId = "InfoTypeId_B" };
+
+ column.Create();
+ column.Refresh();
+
+ Assert.That(column.IsClassified, Is.True, "'b' IsClassified");
+ Assert.That(column.SensitivityInformationTypeId, Is.EqualTo("InfoTypeId_B"), "'b' SensitivityInformationTypeId");
+ });
+ }
+
+ ///
+ /// Verifies that alter succeeds when setting the classification label name to an empty value
+ /// while the classification label id is not empty.
+ /// Note: TASK 429910 Uncomment Azure and remove the platform limitation once T45 is deployed on Azure and Linux
+ ///
+ [TestMethod]
+ public void DataClassification_AlterColumnWithEmptyLabelName()
+ {
+ ExecuteWithClean(db =>
+ {
+ var table = new _SMO.Table(db, GenerateUniqueSmoObjectName("ClassifiedTable"));
+
+ table.Columns.Add(new _SMO.Column(table, "a", _SMO.DataType.Int) { SensitivityLabelName = "LabelName_A", SensitivityLabelId = "LabelId_A" });
+ table.Create();
+
+ var column = table.Columns["a"];
+
+ column.SensitivityLabelName = string.Empty;
+
+ table.Alter();
+ table.Refresh();
+
+ Assert.That(column.IsClassified, Is.True, "'a' IsClassified");
+ Assert.That(column.SensitivityLabelId, Is.EqualTo("LabelId_A"), "'a' SensitivityLabelId");
+ });
+ }
+
+ ///
+ /// Verifies that alter succeeds when setting the classification information type name to an empty value
+ /// while the classification information type id is not empty.
+ /// Note: TASK 429910 Uncomment Azure and remove the platform limitation once T45 is deployed on Azure and Linux
+ ///
+ [TestMethod]
+ public void DataClassification_AlterColumnWithEmptyInformationTypeName()
+ {
+ ExecuteWithClean(db =>
+ {
+ var table = new _SMO.Table(db, GenerateUniqueSmoObjectName("ClassifiedTable"));
+ var column = new _SMO.Column(table, "a", _SMO.DataType.Int) { SensitivityInformationTypeName = "InfoTypeName_A", SensitivityInformationTypeId = "InfoTypeId_A" };
+
+ table.Columns.Add(column);
+ table.Create();
+
+ column.SensitivityInformationTypeName = string.Empty;
+
+ column.Alter();
+ column.Refresh();
+
+ Assert.That(column.IsClassified, Is.True, "'a' IsClassified");
+ Assert.That(column.SensitivityInformationTypeId, Is.EqualTo("InfoTypeId_A"), "'a' SensitivityInformationTypeId");
+ });
+ }
+
+ ///
+ /// Verifies that data classification is not supported when creating a computed column
+ ///
+ [TestMethod]
+ public void DataClassification_ComputedColumnCreate()
+ {
+ ExecuteWithClean(db =>
+ {
+ var table = new _SMO.Table(db, GenerateUniqueSmoObjectName("ClassifiedTable"));
+
+ table.Columns.Add(new _SMO.Column(table, "a", _SMO.DataType.Int));
+ table.Columns.Add(new _SMO.Column(table, "b", _SMO.DataType.Int));
+ table.Columns.Add(new _SMO.Column(table, "c", _SMO.DataType.Int) { Computed = true, ComputedText = "a * b", SensitivityLabelName = "LabelName_C", SensitivityLabelId = "LabelId_C", SensitivityInformationTypeName = "InfoTypeName_C", SensitivityInformationTypeId = "InfoTypeId_C" });
+
+ SmoTestsUtility.AssertInnerException(() => table.Create(), ExceptionTemplates.NoDataClassificationOnComputedColumns);
+ });
+ }
+
+ /// <summary>
+ /// Verifies that data classification is not supported when altering a computed column.
+ /// </summary>
+ [TestMethod]
+ public void DataClassification_ComputedColumnAlter()
+ {
+ ExecuteWithClean(db =>
+ {
+ var table = new _SMO.Table(db, GenerateUniqueSmoObjectName("ClassifiedTable"));
+
+ table.Columns.Add(new _SMO.Column(table, "a", _SMO.DataType.Int));
+ table.Columns.Add(new _SMO.Column(table, "b", _SMO.DataType.Int));
+ table.Columns.Add(new _SMO.Column(table, "c", _SMO.DataType.Int) { Computed = true, ComputedText = "a * b" });
+
+ table.Create();
+
+ var column = table.Columns["c"];
+
+ column.SensitivityLabelName = "LabelName_C";
+ column.SensitivityLabelId = "LabelId_C";
+ column.SensitivityInformationTypeName = "InfoTypeName_C";
+ column.SensitivityInformationTypeId = "InfoTypeId_C";
+
+ SmoTestsUtility.AssertInnerException(() => column.Alter(), ExceptionTemplates.NoDataClassificationOnComputedColumns);
+ });
+ }
+
+ /// <summary>
+ /// Verifies that a column becomes classified by setting only its sensitivity rank.
+ /// </summary>
+ [TestMethod]
+ [SupportedServerVersionRange(DatabaseEngineType = DatabaseEngineType.SqlAzureDatabase, MinMajor = 12)]
+ [SupportedServerVersionRange(DatabaseEngineType = DatabaseEngineType.Standalone, MinMajor = 15)]
+ public void DataClassification_SetSensitivityRank()
+ {
+ ExecuteWithClean(db =>
+ {
+ var table = new _SMO.Table(db, GenerateUniqueSmoObjectName("ClassifiedTable"));
+ var column1 = new _SMO.Column(table, "a", _SMO.DataType.Int) { SensitivityRank = SensitivityRank.High };
+
+ table.Columns.Add(column1);
+ table.Create();
+
+ Assert.That(column1.IsClassified, Is.True, "'a' IsClassified");
+ Assert.That(column1.SensitivityRank, Is.EqualTo(SensitivityRank.High), "'a' SensitivityRank");
+ });
+ }
+
+ /// <summary>
+ /// Verifies that a column becomes unclassified when its only sensitivity attribute, the sensitivity rank, is cleared.
+ /// </summary>
+ [TestMethod]
+ [SupportedServerVersionRange(DatabaseEngineType = DatabaseEngineType.SqlAzureDatabase, MinMajor = 12)]
+ [SupportedServerVersionRange(DatabaseEngineType = DatabaseEngineType.Standalone, MinMajor = 15)]
+ public void DataClassification_ClearSensitivityRank()
+ {
+ ExecuteWithClean(db =>
+ {
+ var table = new _SMO.Table(db, GenerateUniqueSmoObjectName("ClassifiedTable"));
+ var column1 = new _SMO.Column(table, "a", _SMO.DataType.Int) { SensitivityRank = SensitivityRank.High };
+
+ table.Columns.Add(column1);
+ table.Create();
+
+ column1.SensitivityRank = SensitivityRank.Undefined;
+
+ column1.Alter();
+ column1.Refresh();
+
+ Assert.That(column1.IsClassified, Is.False, "'a' IsClassified");
+ Assert.That(column1.SensitivityRank, Is.EqualTo(SensitivityRank.Undefined), "'a' SensitivityRank");
+ });
+ }
+
+ /// <summary>
+ /// Verifies GetClassifiedColumns returns an empty list when the database has no classified columns
+ /// after table and column create.
+ /// </summary>
+ [TestMethod]
+ public void DataClassification_GetClassifiedColumnsEmptyOnCreate()
+ {
+ ExecuteWithClean(db =>
+ {
+ var table = new _SMO.Table(db, GenerateUniqueSmoObjectName("ClassifiedTable"));
+ var column1 = new _SMO.Column(table, "a", _SMO.DataType.Int);
+ var column2 = new _SMO.Column(table, "b", _SMO.DataType.Int);
+ var column3 = new _SMO.Column(table, "c", _SMO.DataType.Int);
+ var column4 = new _SMO.Column(table, "d", _SMO.DataType.Int);
+
+ table.Columns.Add(column1);
+ table.Columns.Add(column2);
+
+ table.Create();
+
+ column3.Create();
+ column4.Create();
+
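+ // No classification was applied anywhere, so after InitializeClassifiedColumns the database should report no tables with classified columns.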
+ db.InitializeClassifiedColumns();
+
+ Assert.That(db.Tables, Is.Empty, "db.Tables must be empty");
+ });
+ }
+
+ /// <summary>
+ /// Verifies GetClassifiedColumns returns an empty list when the database has no classified columns
+ /// after table and column alter.
+ /// </summary>
+ [TestMethod]
+ public void DataClassification_GetClassifiedColumnsEmptyOnAlter()
+ {
+ ExecuteWithClean(db =>
+ {
+ var table = new _SMO.Table(db, GenerateUniqueSmoObjectName("ClassifiedTable"));
+ var column1 = new _SMO.Column(table, "a", _SMO.DataType.Int) { SensitivityLabelName = "'LabelName_A'", SensitivityLabelId = "'LabelId_A'", SensitivityInformationTypeName = "'InfoTypeName_A'", SensitivityInformationTypeId = "'InfoTypeId_A'" };
+ var column2 = new _SMO.Column(table, "b", _SMO.DataType.Int);
+ var column3 = new _SMO.Column(table, "c", _SMO.DataType.Int) { SensitivityLabelName = "LabelName_C", SensitivityInformationTypeName = "InfoTypeName_C" };
+ var column4 = new _SMO.Column(table, "d", _SMO.DataType.Int);
+
+ table.Columns.Add(column1);
+ table.Columns.Add(column2);
+
+ table.Create();
+
+ column3.Create();
+ column4.Create();
+
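+ // Strip the classification from the columns that had it and persist the change with Alter.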
+ column1.RemoveClassification();
+ column1.Alter();
+
+ column3.RemoveClassification();
+ column3.Alter();
+
+ db.InitializeClassifiedColumns();
+
+ Assert.That(db.Tables, Is.Empty, "db.Tables must be empty");
+ });
+ }
+
+ /// <summary>
+ /// Verifies GetClassifiedColumns returns a non-empty list when the database has classified columns
+ /// after table and column create.
+ /// </summary>
+ [TestMethod]
+ public void DataClassification_GetClassifiedColumnsNonEmptyOnCreate()
+ {
+ ExecuteWithClean(db =>
+ {
+ var table = new _SMO.Table(db, GenerateUniqueSmoObjectName("ClassifiedTable"));
+ var column1 = new _SMO.Column(table, "a", _SMO.DataType.Int) { SensitivityLabelName = "'LabelName_A'", SensitivityLabelId = "'LabelId_A'", SensitivityInformationTypeName = "'InfoTypeName_A'", SensitivityInformationTypeId = "'InfoTypeId_A'" };
+ var column2 = new _SMO.Column(table, "b", _SMO.DataType.Int);
+ var column3 = new _SMO.Column(table, "c", _SMO.DataType.Int) { SensitivityLabelName = "LabelName_C", SensitivityInformationTypeName = "InfoTypeName_C" };
+ var column4 = new _SMO.Column(table, "d", _SMO.DataType.Int);
+
+ var rank1 = SetSensitivityRank(column1, SensitivityRank.Low);
+ var rank3 = SetSensitivityRank(column3, SensitivityRank.High);
+
+ table.Columns.Add(column1);
+ table.Columns.Add(column2);
+
+ table.Create();
+
+ column3.Create();
+ column4.Create();
+
+ db.InitializeClassifiedColumns();
+
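+ // Flatten the classified columns the database reports so they can be asserted on in order.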
+ List<Column> classifiedColumns = new List<Column>();
+
+ foreach (Table t in db.Tables)
+ {
+ foreach (Column c in t.Columns)
+ {
+ classifiedColumns.Add(c);
+ }
+ }
+
+ Assert.That(classifiedColumns.Count, Is.EqualTo(2), "classifiedColumns");
+ Assert.That(classifiedColumns[0].SensitivityLabelName, Is.EqualTo("'LabelName_A'"), "'a' SensitivityLabelName");
+ Assert.That(classifiedColumns[0].SensitivityLabelId, Is.EqualTo("'LabelId_A'"), "'a' SensitivityLabelId");
+ Assert.That(classifiedColumns[0].SensitivityInformationTypeName, Is.EqualTo("'InfoTypeName_A'"), "'a' SensitivityInformationTypeName");
+ Assert.That(classifiedColumns[0].SensitivityInformationTypeId, Is.EqualTo("'InfoTypeId_A'"), "'a' SensitivityInformationTypeId");
+ Assert.That(GetSensitivityRank(classifiedColumns[0]), Is.EqualTo(rank1), "'a' SensitivityRank");
+ Assert.That(classifiedColumns[1].SensitivityLabelName, Is.EqualTo("LabelName_C"), "'c' SensitivityLabelName");
+ Assert.That(classifiedColumns[1].SensitivityInformationTypeName, Is.EqualTo("InfoTypeName_C"), "'c' SensitivityInformationTypeName");
+ Assert.That(GetSensitivityRank(classifiedColumns[1]), Is.EqualTo(rank3), "'c' SensitivityRank");
+ });
+ }
+
+ /// <summary>
+ /// Verifies GetClassifiedColumns returns a non-empty list when the database has classified columns
+ /// after table and column alter.
+ /// </summary>
+ [TestMethod]
+ public void DataClassification_GetClassifiedColumnsNonEmptyOnAlter()
+ {
+ ExecuteWithClean(db =>
+ {
+ var table = new _SMO.Table(db, GenerateUniqueSmoObjectName("ClassifiedTable"));
+
+ table.Columns.Add(new _SMO.Column(table, "a", _SMO.DataType.Int) { SensitivityLabelName = "LabelName_A", SensitivityLabelId = "LabelId_A", SensitivityInformationTypeName = "InfoTypeName_A", SensitivityInformationTypeId = "InfoTypeId_A" });
+ table.Columns.Add(new _SMO.Column(table, "b", _SMO.DataType.Int));
+
+ SetSensitivityRank(table.Columns[0], SensitivityRank.Low);
+
+ table.Create();
+
+ var column1 = table.Columns["a"];
+ var column2 = table.Columns["b"];
+
+ column1.SensitivityLabelName = "'LabelName_A_Changed'";
+ column1.SensitivityLabelId = string.Empty;
+ column1.SensitivityInformationTypeName = "InfoTypeName_A_Changed";
+ column1.SensitivityInformationTypeId = string.Empty;
+ var rank1 = SetSensitivityRank(column1, SensitivityRank.Medium);
+
+ column1.Alter();
+
+ column2.SensitivityInformationTypeName = "InfoTypeName_B_Changed";
+
+ table.Alter();
+
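+ // Gather the columns back from the database and verify the altered classification metadata round-tripped.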
+ List<Column> classifiedColumns = new List<Column>();
+
+ foreach (Table t in db.Tables)
+ {
+ foreach (Column c in t.Columns)
+ {
+ classifiedColumns.Add(c);
+ }
+ }
+
+ Assert.That(classifiedColumns.Count, Is.EqualTo(2), "classifiedColumns");
+ Assert.That(classifiedColumns[0].SensitivityLabelName, Is.EqualTo("'LabelName_A_Changed'"), "'a' SensitivityLabelName");
+ Assert.That(classifiedColumns[0].SensitivityLabelId, Is.Empty, "'a' SensitivityLabelId");
+ Assert.That(classifiedColumns[0].SensitivityInformationTypeName, Is.EqualTo("InfoTypeName_A_Changed"), "'a' SensitivityInformationTypeName");
+ Assert.That(classifiedColumns[0].SensitivityInformationTypeId, Is.Empty, "'a' SensitivityInformationTypeId");
+ Assert.That(GetSensitivityRank(classifiedColumns[0]), Is.EqualTo(rank1), "'a' SensitivityRank");
+ Assert.That(classifiedColumns[1].SensitivityInformationTypeName, Is.EqualTo("InfoTypeName_B_Changed"), "'b' SensitivityInformationTypeName");
+ });
+ }
+
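+ /// <summary>
+ /// Sets the sensitivity rank only when the target server supports the SensitivityRank property;
+ /// returns the value actually applied, or Undefined when the property is unsupported.
+ /// </summary>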
+ private static SensitivityRank SetSensitivityRank(Column column, SensitivityRank rank)
+ {
+ if (column.IsSupportedProperty("SensitivityRank"))
+ {
+ column.SensitivityRank = rank;
+
+ return column.SensitivityRank;
+ }
+
+ return SensitivityRank.Undefined;
+ }
+
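+ /// <summary>
+ /// Reads the sensitivity rank, falling back to Undefined on servers that do not support the property.
+ /// </summary>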
+ private static SensitivityRank GetSensitivityRank(Column column)
+ {
+ return column.IsSupportedProperty("SensitivityRank") ? column.SensitivityRank : SensitivityRank.Undefined;
+ }
+
+ protected override void VerifyIsSmoObjectDropped(SqlSmoObject obj, SqlSmoObject objVerify)
+ {
+
+ }
+
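+ /// <summary>
+ /// Runs the given action against a pooled database after dropping any existing tables,
+ /// so every test starts from an empty table collection.
+ /// </summary>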
+ private void ExecuteWithClean(Action action)
+ {
+ ExecuteFromDbPool(db =>
+ {
+ List<Table> tables = new List<Table>();
+
+ // avoid "Collection was modified; enumeration operation may not execute"
+ foreach (Table t in db.Tables)
+ {
+ tables.Add(t);
+ }
+
+ foreach (Table t in tables)
+ {
+ t.Drop();
+ }
+
+ db.Refresh();
+
+ action.Invoke(db);
+ });
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/FunctionalTest/Smo/GeneralFunctionality/DatabaseSmoTests.cs b/src/FunctionalTest/Smo/GeneralFunctionality/DatabaseSmoTests.cs
new file mode 100644
index 00000000..58da0c89
--- /dev/null
+++ b/src/FunctionalTest/Smo/GeneralFunctionality/DatabaseSmoTests.cs
@@ -0,0 +1,1077 @@
+// Copyright (c) Microsoft.
+// Licensed under the MIT license.
+
+#if MICROSOFTDATA
+using Microsoft.Data.SqlClient;
+#else
+using System.Data.SqlClient;
+#endif
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+using Microsoft.SqlServer.Management.Common;
+using Microsoft.SqlServer.Test.Manageability.Utils;
+using Microsoft.SqlServer.Test.Manageability.Utils.TestFramework;
+using Microsoft.VisualStudio.TestTools.UnitTesting;
+using NUnit.Framework;
+using System.Linq;
+using System.Threading;
+using Microsoft.SqlServer.Management.Sdk.Sfc;
+using Microsoft.SqlServer.Management.Sdk.Sfc.Metadata;
+using Microsoft.SqlServer.Test.Manageability.Utils.Helpers;
+using _SMO = Microsoft.SqlServer.Management.Smo;
+using Assert = NUnit.Framework.Assert;
+using Microsoft.SqlServer.Management.Smo;
+using System.Data;
+using System.IO;
+
+namespace Microsoft.SqlServer.Test.SMO.GeneralFunctionality
+{
+ /// <summary>
+ /// Tests for general Database SMO functionality.
+ /// </summary>
+ [TestClass]
+ [UnsupportedDatabaseEngineEdition(DatabaseEngineEdition.SqlOnDemand)]
+ public class DatabaseSmoTests : SqlTestBase
+ {
+
+ /// <summary>
+ /// In a nutshell, we validate that Database.Alter() on a database in "restoring" state does not throw.
+ /// </summary>
+ [TestMethod]
+ [SupportedServerVersionRange(Edition = DatabaseEngineEdition.Enterprise, HostPlatform = HostPlatformNames.Windows, MaxMajor = 15, MinMajor = 15)]
+ public void Database_Scripting_Alter_When_Database_Is_Restoring_succeeds()
+ {
+ ExecuteFromDbPool((db) =>
+ {
+ string backupfile = null;
+ Database restoringDb = null;
+
+ try
+ {
+ // Take full backup of the database
+ db.TakeFullBackup();
+
+ // Get path to the backup file
+ backupfile = Path.Combine(db.Parent.BackupDirectory, $"{db.Name}.bak");
+
+ // Restore the database with NORECOVERY (in Object Explorer it shows as "Restoring...")
+ restoringDb = DatabaseObjectHelpers.RestoreDatabaseFromBackup(db.Parent, backupfile, db.Name + "_new", withNoRecovery: true);
+
+ // Trying to Alter() the DB should NOT throw
+ Assert.DoesNotThrow(restoringDb.Alter, "Alter of a restoring DB failed!");
+ }
+ finally
+ {
+ // Cleanup after ourselves
+ if (backupfile != null)
+ {
+ // Best effort cleanup; can be removed once a scheduled task that deletes
+ // old .bak files on the servers is in place.
+ var guessedRemotePath = @"\\" + db.Parent.Name + @"\" + backupfile.Replace(':', '$');
+ try
+ {
+ if (File.Exists(guessedRemotePath))
+ {
+ File.Delete(guessedRemotePath);
+ Trace.TraceInformation($"Best effort cleanup succeeded: {guessedRemotePath}");
+ }
+ }
+ catch
+ {
+ Trace.TraceInformation($"Best effort cleanup failed: {guessedRemotePath}");
+ }
+ }
+
+ if (restoringDb != null)
+ {
+ restoringDb.Drop();
+ }
+
+ }
+ });
+ }
+
+
+ /// <summary>
+ /// Tests that 'schema' is reported correctly (i.e. not blank all the time)
+ /// by the EnumObjects() method. Also validates the other three properties.
+ /// </summary>
+ [TestMethod]
+ [UnsupportedDatabaseEngineEdition(DatabaseEngineEdition.SqlDataWarehouse)]
+ public void EnumObjects_Sets_Synonym_Schema_And_Other_Properties()
+ {
+ ExecuteWithDbDrop("Synonym",
+ database =>
+ {
+ //
+ var expectedSynSchema = database.Schemas[0].Name; // any existing schema is good
+ var expectedSynName = SmoObjectHelpers.GenerateUniqueObjectName(); // a given random name
+
+ // Create a new synonym
+ _SMO.Synonym syn = new _SMO.Synonym(database, expectedSynName)
+ {
+ BaseDatabase = database.Name,
+ BaseSchema = "dummySchema", // dummy value - does not matter
+ BaseObject = "dummyObj", // dummy value - does not matter
+ Schema = expectedSynSchema
+ };
+
+ syn.Create();
+
+ // Now, we retrieve the same object we just created by calling EnumObjects()
+ var objs = database.EnumObjects(DatabaseObjectTypes.Synonym);
+ var synobj = objs.Rows.Cast<DataRow>().Where(r => (string)r["Name"] == expectedSynName).Single();
+
+ // The original bug was that Schema was coming back as blank, because there was an assumption
+ // that synonyms did not have a schema (which was incorrect)
+ Assert.That(synobj["Schema"], Is.EqualTo(expectedSynSchema), "Unexpected value for Schema");
+
+ // While we are at it, let's also check the other properties...
+ Assert.That(synobj["DatabaseObjectTypes"], Is.EqualTo("Synonym"), "Unexpected value for DatabaseObjectTypes");
+ Assert.That(synobj["Name"], Is.EqualTo(expectedSynName), "Unexpected value for Name");
+ Assert.That(synobj["Urn"], Is.EqualTo(syn.Urn.ToString()), "Unexpected value for Urn");
+ });
+ }
+
+ /// <summary>
+ /// Verifies that Database.Drop succeeds when the connection's initial catalog is the user database being dropped.
+ /// </summary>
+ [TestMethod]
+ [SupportedServerVersionRange(Edition = DatabaseEngineEdition.SqlDatabase)]
+ public void Database_Drop_does_not_throw_when_Parent_is_user_database()
+ {
+ ExecuteWithDbDrop(db =>
+ {
+ var connString = new SqlConnectionStringBuilder(this.SqlConnectionStringBuilder.ConnectionString)
+ {
+ InitialCatalog = db.Name
+ };
+ var server = new _SMO.Server(new ServerConnection(new SqlConnection(connString.ConnectionString)));
+ var dbToDrop = server.Databases[db.Name];
+ var droppedEventRaised = false;
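+ // Listen for the global SMO ObjectDropped event so we can confirm Drop raises it for this database.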
+ SmoApplication.EventsSingleton.ObjectDropped += (s, e) =>
+ {
+ if (e.Urn == dbToDrop.Urn) droppedEventRaised = true;
+ };
+ Assert.DoesNotThrow(() => dbToDrop.Drop(), "Drop when initial catalog is database name");
+ Assert.Throws<_SMO.FailedOperationException>(() => db.Refresh(), "Refresh should fail after db is dropped");
+ Assert.That(droppedEventRaised, Is.EqualTo(true), "Dropped event should be raised");
+ });
+ }
+
+ /// <summary>
+ /// DW does not support Drop calls on the user database directly.
+ /// </summary>
+ [TestMethod]
+ [SupportedServerVersionRange(Edition = DatabaseEngineEdition.SqlDataWarehouse)]
+ public void Database_Drop_DW_throws_when_Parent_is_user_database()
+ {
+ ExecuteWithDbDrop(db =>
+ {
+ var connString = new SqlConnectionStringBuilder(this.SqlConnectionStringBuilder.ConnectionString)
+ {
+ InitialCatalog = db.Name
+ };
+ var server = new _SMO.Server(new ServerConnection(new SqlConnection(connString.ConnectionString)));
+ var dbToDrop = server.Databases[db.Name];
+
+ var ex = Assert.Throws<_SMO.FailedOperationException>(() => dbToDrop.Drop(), "Drop when initial catalog is DW database name");
+ Assert.That(ex.InnerException, Is.InstanceOf<ExecutionFailureException>(),
+ "InnerException should be ExecutionFailureException");
+ Assert.That(ex.InnerException.InnerException, Is.InstanceOf<SqlException>(), "InnerException.InnerException should be SqlException");
+ Assert.That(ex.InnerException.InnerException.Message, Contains.Substring("'Drop Database' is not supported"),
+ "InnerException message");
+ });
+ }
+
+ /// <summary>
+ /// Verify that property SpaceAvailable is reported as 0 (n/a) for SQL DW.
+ /// </summary>
+ [TestMethod]
+ [TestCategory("Legacy")] /* slow test, not for PR validation */
+ [SupportedServerVersionRange(DatabaseEngineType = DatabaseEngineType.SqlAzureDatabase, Edition = DatabaseEngineEdition.SqlDataWarehouse)]
+ public void Database_SpaceAvailable_Is_Zero_For_DW()
+ {
+ ExecuteTest(
+ srv => {
+ Database_SpaceAvailable_Is_Zero(AzureDatabaseEdition.DataWarehouse);
+ });
+ }
+
+ /// <summary>
+ /// Verify that property SpaceAvailable is reported as 0 (n/a) for SQL DB (Hyperscale edition).
+ /// </summary>
+ [TestMethod]
+ [TestCategory("Legacy")] /* slow test, not for PR validation */
+ [SupportedServerVersionRange(DatabaseEngineType = DatabaseEngineType.SqlAzureDatabase, Edition = DatabaseEngineEdition.SqlDatabase)]
+ public void Database_SpaceAvailable_Is_Zero_For_Hyperscale()
+ {
+ ExecuteTest(
+ srv => {
+ Database_SpaceAvailable_Is_Zero(AzureDatabaseEdition.Hyperscale);
+ });
+ }
+
+ /// <summary>
+ /// Helper method for the two tests above; asserts that the SpaceAvailable property is 0 for the given Azure database edition.
+ /// </summary>
+ private void Database_SpaceAvailable_Is_Zero(AzureDatabaseEdition ade)
+ {
+ ExecuteWithDbDrop(db =>
+ {
+ Assert.That(db.SpaceAvailable, Is.EqualTo(0), $"Unexpected SpaceAvailable for azure edition '{db.AzureEdition}'");
+ }, ade);
+ }
+
+ [TestMethod]
+ [TestCategory("Legacy")] /* test prone to race condition (so it seems), not for PR validation */
+ [UnsupportedDatabaseEngineEdition(DatabaseEngineEdition.SqlDataWarehouse, DatabaseEngineEdition.SqlOnDemand, DatabaseEngineEdition.Express)]
+ public void Database_enabling_encryption_creates_encryption_key()
+ {
+ ExecuteWithDbDrop(db =>
+ {
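+ // Flip whatever the current encryption setting is so the Alter below always changes something.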
+ var encryptionEnabled = !db.EncryptionEnabled;
+ db.EncryptionEnabled = encryptionEnabled;
+ // On-premises servers newer than SQL Server 2008 R2 require a database encryption key
+ if (db.IsSupportedProperty(nameof(db.HasDatabaseEncryptionKey)) && db.DatabaseEncryptionKey.State != SqlSmoState.Existing)
+ {
+ // Script snippet copied from databaseencryptionkey_smotestsuite to use the same cert name
+ db.Parent.ConnectionContext.ExecuteNonQuery($@"--First create master key for server (on master db)
+IF NOT EXISTS (SELECT * FROM sys.symmetric_keys WHERE symmetric_key_id = 101)
+BEGIN
+ CREATE MASTER KEY ENCRYPTION BY PASSWORD = '{SqlTestRandom.GeneratePassword()}'
+END
+GO
+
+--Then create server-scoped certificate
+IF NOT EXISTS (SELECT * FROM sys.certificates where name = 'DEK_SmoTestSuite_ServerCertificate')
+BEGIN
+ CREATE CERTIFICATE DEK_SmoTestSuite_ServerCertificate
+ WITH SUBJECT = 'Database Encryption Key Test Server Certificate',
+ EXPIRY_DATE = '30001031',
+ START_DATE = '20121031'
+END");
+ db.DatabaseEncryptionKey.EncryptorName = "DEK_SmoTestSuite_ServerCertificate";
+ db.DatabaseEncryptionKey.EncryptionAlgorithm = DatabaseEncryptionAlgorithm.Aes256;
+ db.DatabaseEncryptionKey.EncryptionType = DatabaseEncryptionType.ServerCertificate;
+ Assert.That(db.DatabaseEncryptionKey.State, Is.EqualTo(SqlSmoState.Creating), "DatabaseEncryptionKey.State pre-Alter");
+ }
+
+ db.Alter();
+ db.Refresh();
+ Assert.That(db.EncryptionEnabled, Is.EqualTo(encryptionEnabled), "EncryptionEnabled after Alter");
+ if (db.IsSupportedProperty(nameof(db.HasDatabaseEncryptionKey)))
+ {
+ Assert.That(db.DatabaseEncryptionKey.State, Is.EqualTo(SqlSmoState.Existing),
+ "DatabaseEncryptionKey.State post-Alter");
+ // We can't immediately toggle encryption until the first change is done
+ var maxWaits = 300;
+ while (maxWaits-- > 0 &&
+ (db.DatabaseEncryptionKey.EncryptionState == DatabaseEncryptionState.EncryptionInProgress || db.DatabaseEncryptionKey.EncryptionState == DatabaseEncryptionState.DecryptionInProgress))
+ {
+ Thread.Sleep(100);
+ db.DatabaseEncryptionKey.Refresh();
+ }
+ db.EnableEncryption(!encryptionEnabled);
+ db.Refresh();
+ Assert.That(db.EncryptionEnabled, Is.EqualTo(!encryptionEnabled), $"EncryptionEnabled after EnableEncryption({!encryptionEnabled})");
+ }
+ });
+ }
+
+ /// <summary>
+ /// This is a catch-all method for covering various database methods without wasting
+ /// code on a separate test-per-method.
+ /// </summary>
+ [TestMethod]
+ [UnsupportedDatabaseEngineEdition(DatabaseEngineEdition.SqlDataWarehouse, DatabaseEngineEdition.SqlOnDemand)]
+ public void Database_miscellaneous_methods_produce_correct_sql()
+ {
+ ExecuteWithDbDrop(db =>
+ {
+ // dbcc checkdb requires single user mode
+ if (db.DatabaseEngineEdition != DatabaseEngineEdition.SqlDatabase)
+ {
+ db.UserAccess = DatabaseUserAccess.Single;
+ }
+ db.AutoUpdateStatisticsEnabled = false;
+ db.AutoCreateStatisticsEnabled = true;
+ db.Alter();
+ var table = db.CreateTable("t1", new ColumnProperties("c1", DataType.Int));
+ table.InsertDataToTable(100);
+ var view = db.CreateView("v1", "dbo", $"select c1 from [dbo].{table.Name.SqlBracketQuoteString()}", isSchemaBound: true);
+ var index = new _SMO.Index(view, "i1") { IsClustered = true, IsUnique = true };
+ index.IndexedColumns.Add(new _SMO.IndexedColumn(index, "c1"));
+ index.Create();
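+ // Switch to execute-and-capture mode so the T-SQL emitted by the calls below can be asserted on.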
+ db.ExecutionManager.ConnectionContext.SqlExecutionModes = SqlExecutionModes.ExecuteAndCaptureSql;
+ Assert.Multiple(() =>
+ {
+ db.UpdateIndexStatistics();
+ var commands = db.ExecutionManager.ConnectionContext.CapturedSql.Text.Cast<string>();
+ Assert.That(commands, Has.Member($"UPDATE STATISTICS [dbo].{table.Name.SqlBracketQuoteString()}"), "UpdateIndexStatistics should include table t1");
+ Assert.That(commands, Has.Member($"UPDATE STATISTICS [dbo].{view.Name.SqlBracketQuoteString()}"), "UpdateIndexStatistics should include view v1");
+ db.ExecutionManager.ConnectionContext.CapturedSql.Clear();
+ PrefetchAllChildTypes(db);
+ GetArchiveReports(db);
+ if (db.DatabaseEngineEdition != DatabaseEngineEdition.SqlDatabase && db.DatabaseEngineEdition != DatabaseEngineEdition.SqlManagedInstance)
+ {
+ CheckTablesDataOnlyAllRepairTypes(db);
+ CheckTablesAllRepairTypes(db);
+ CheckAllocationsAllRepairTypes(db);
+ }
+ });
+
+ });
+ }
+
+ [TestMethod]
+ [UnsupportedDatabaseEngineEdition(DatabaseEngineEdition.SqlDataWarehouse, DatabaseEngineEdition.SqlOnDemand)]
+ public void Database_shrink()
+ {
+ ExecuteFromDbPool((db) =>
+ {
+ var table = db.CreateTable("t1", new ColumnProperties("c1", DataType.Int));
+ table.InsertDataToTable(100);
+ db.ExecutionManager.ConnectionContext.SqlExecutionModes = SqlExecutionModes.ExecuteAndCaptureSql;
+ Assert.Multiple(() =>
+ {
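+ // A percentage above 100 is expected to be clamped to 100 in the generated DBCC SHRINKDATABASE command.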
+ db.Shrink(101, ShrinkMethod.Default);
+ var commands = db.ExecutionManager.ConnectionContext.CapturedSql.Text.Cast<string>();
+ Assert.That(commands, Has.Member($"DBCC SHRINKDATABASE(N{db.Name.SqlSingleQuoteString()}, 100 )"), "shrink 101 default");
+ db.ExecutionManager.ConnectionContext.CapturedSql.Clear();
+ db.Shrink(0, ShrinkMethod.NoTruncate);
+ commands = db.ExecutionManager.ConnectionContext.CapturedSql.Text.Cast<string>();
+ Assert.That(commands, Has.Member($"DBCC SHRINKDATABASE(N{db.Name.SqlSingleQuoteString()}, NOTRUNCATE)"), "shrink 0 notruncate");
+ db.ExecutionManager.ConnectionContext.CapturedSql.Clear();
+ db.Shrink(0, ShrinkMethod.TruncateOnly);
+ commands = db.ExecutionManager.ConnectionContext.CapturedSql.Text.Cast<string>();
+ Assert.That(commands, Has.Member($"DBCC SHRINKDATABASE(N{db.Name.SqlSingleQuoteString()}, TRUNCATEONLY)"), "shrink 0 truncateonly");
+ db.ExecutionManager.ConnectionContext.CapturedSql.Clear();
+ Assert.Throws(() => db.Shrink(0, ShrinkMethod.EmptyFile));
+ });
+ });
+ }
+
+
+ /// <summary>
+ /// This is the set of child object types that don't have a Database.PrefetchObjectsImpl implementation.
+ /// As of Dec 2019 it's not clear what criteria to use to choose which child collections should
+ /// be part of this API. It might be intended for objects that themselves have specific child object types
+ /// that the user would want to include in the fetch in order to create the script.
+ /// </summary>
+ private static readonly HashSet<Type> nonPrefetchedTypes = new HashSet<Type>
+ {
+ typeof(DatabaseScopedCredential),
+ typeof(Synonym),
+ typeof(FullTextCatalog),
+ typeof(FullTextStopList),
+ typeof(SearchPropertyList),
+ typeof(SecurityPolicy),
+ typeof(Certificate),
+ typeof(SymmetricKey),
+ typeof(AsymmetricKey),
+ typeof(ApplicationRole),
+ typeof(ExternalLanguage)
+ };
+
+ private static readonly IEnumerable<Type> extraPrefetchedTypes = new[]
+ {
+ typeof(Default),
+ typeof(_SMO.Rule),
+ typeof(PartitionScheme),
+ typeof(PartitionFunction),
+ };
+
+ private static void PrefetchAllChildTypes(_SMO.Database db)
+ {
+ var disc = new SfcMetadataDiscovery(typeof(_SMO.Database));
+ var tObjectPermission = typeof(_SMO.IObjectPermission);
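+ // Discover every child collection of Database whose element type implements IObjectPermission, minus the types PrefetchObjects is known not to handle.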
+ var objectTypes = disc.Objects.Where(r =>
+ r.ContainerType != null && tObjectPermission.IsAssignableFrom(r.Type)).Select(r => r.Type)
+ .Except(nonPrefetchedTypes).ToList();
+ Assert.That(objectTypes, Is.Not.Empty, "Database should have collections of child objects that implement IObjectPermission");
+ Assert.Multiple(() =>
+ {
+ foreach (var relation in objectTypes.Union(extraPrefetchedTypes))
+ {
+ Assert.DoesNotThrow(() => db.PrefetchObjects(relation), $"Database.PrefetchObjects should support type {relation.Name}. If Prefetch support is not needed, update nonPrefetchedTypes.");
+ }
+
+ Assert.That(() => db.PrefetchObjects(typeof(SqlSmoObject)), Throws.InstanceOf(), "PrefetchObjects(typeof(SqlSmoObject))");
+ Assert.That(() => db.PrefetchObjects(null), Throws.InstanceOf(), "PrefetchObjects(null)");
+ Assert.That(() => db.PrefetchObjects(typeof(Table), (ScriptingPreferences)null), Throws.InstanceOf(), "PrefetchObjects(Table, (ScriptingPreferences)null)");
+ Assert.That(() => db.PrefetchObjects(typeof(Table), (ScriptingOptions)null), Throws.InstanceOf(), "PrefetchObjects(Table, (ScriptingOptions)null)");
+ });
+ }
+
+ private static void GetArchiveReports(_SMO.Database db)
+ {
+ if (db.IsSupportedProperty(nameof(db.RemoteDataArchiveEnabled)))
+ {
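+ // The pooled database has no tables migrated to a remote archive, so the status report list should be empty.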
+ var statusReports =
+ db.GetRemoteDataArchiveMigrationStatusReports(
+ DateTime.UtcNow.Subtract(TimeSpan.FromDays(1)), 2).ToList();
+ Assert.That(statusReports, Is.Empty, "GetRemoteDataArchiveMigrationStatusReports should return empty list for non-archived DB");
+ var commands = db.ExecutionManager.ConnectionContext.CapturedSql.Text.Cast<string>();
+ var select = commands.Single(c => c.Contains("INNER JOIN"));
+ Assert.That(select, Contains.Substring("SELECT\nTOP (2) dbs.name as database_name,"), "GetRemoteDataArchiveMigrationStatusReports should select 2 rows");
+ Assert.That(select, Contains.Substring("FROM\nsys.dm_db_rda_migration_status rdams"), "GetRemoteDataArchiveMigrationStatusReports should query ys.dm_db_rda_migration_status");
+ db.ExecutionManager.ConnectionContext.CapturedSql.Clear();
+ }
+ }
+
+ private static void CheckTablesDataOnlyAllRepairTypes(_SMO.Database db)
+ {
+ var messages = db.CheckTablesDataOnly().Cast