Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
18 changes: 18 additions & 0 deletions azure-pipelines.yml
Original file line number Diff line number Diff line change
Expand Up @@ -284,6 +284,15 @@ stages:
env:
SPARK_HOME: $(Build.BinariesDirectory)\spark-2.4.6-bin-hadoop2.7

- task: DotNetCoreCLI@2
displayName: 'E2E tests for Spark 3.0.0'
inputs:
command: test
projects: '**/Microsoft.Spark*.E2ETest/*.csproj'
arguments: '--configuration $(buildConfiguration)'
env:
SPARK_HOME: $(Build.BinariesDirectory)\spark-3.0.0-bin-hadoop2.7

- stage: ForwardCompatibility
displayName: E2E Forward Compatibility Tests
dependsOn: Build
Expand Down Expand Up @@ -549,3 +558,12 @@ stages:
env:
SPARK_HOME: $(Build.BinariesDirectory)\spark-2.4.6-bin-hadoop2.7

- task: DotNetCoreCLI@2
displayName: 'E2E tests for Spark 3.0.0'
inputs:
command: test
projects: '**/Microsoft.Spark*.E2ETest/*.csproj'
arguments: '--configuration $(buildConfiguration) --filter $(TestsToFilterOut)'
env:
SPARK_HOME: $(Build.BinariesDirectory)\spark-3.0.0-bin-hadoop2.7

2 changes: 1 addition & 1 deletion script/download-spark-distros.cmd
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,6 @@ curl -k -L -o spark-2.4.3.tgz https://archive.apache.org/dist/spark/spark-2.4.3/
curl -k -L -o spark-2.4.4.tgz https://archive.apache.org/dist/spark/spark-2.4.4/spark-2.4.4-bin-hadoop2.7.tgz && tar xzvf spark-2.4.4.tgz
curl -k -L -o spark-2.4.5.tgz https://archive.apache.org/dist/spark/spark-2.4.5/spark-2.4.5-bin-hadoop2.7.tgz && tar xzvf spark-2.4.5.tgz
curl -k -L -o spark-2.4.6.tgz https://archive.apache.org/dist/spark/spark-2.4.6/spark-2.4.6-bin-hadoop2.7.tgz && tar xzvf spark-2.4.6.tgz
curl -k -L -o spark-3.0.0.tgz https://archive.apache.org/dist/spark/spark-3.0.0/spark-3.0.0-bin-hadoop2.7.tgz && tar xzvf spark-3.0.0.tgz

endlocal

Original file line number Diff line number Diff line change
Expand Up @@ -14,9 +14,17 @@ public class DeltaFixture

public DeltaFixture()
{
Version sparkVersion = SparkSettings.Version;
string deltaVersion = sparkVersion.Major switch
{
2 => "delta-core_2.11:0.6.1",
3 => "delta-core_2.12:0.7.0",
_ => throw new NotSupportedException($"Spark {sparkVersion} not supported.")
};

Environment.SetEnvironmentVariable(
SparkFixture.EnvironmentVariableNames.ExtraSparkSubmitArgs,
"--packages io.delta:delta-core_2.11:0.6.1 " +
$"--packages io.delta:{deltaVersion} " +
"--conf spark.databricks.delta.snapshotPartitions=2 " +
"--conf spark.sql.sources.parallelPartitionDiscovery.parallelism=5");
SparkFixture = new SparkFixture();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ public DeltaTableTests(DeltaFixture fixture)
/// Run the end-to-end scenario from the Delta Quickstart tutorial.
/// </summary>
/// <see cref="https://docs.delta.io/latest/quick-start.html"/>
[SkipIfSparkVersionIsLessThan(Versions.V2_4_2)]
[SkipIfSparkVersionIsNotInRange(Versions.V2_4_2, Versions.V3_0_0)]
public void TestTutorialScenario()
{
using var tempDirectory = new TemporaryDirectory();
Expand Down Expand Up @@ -223,7 +223,7 @@ void testWrapper(
/// <summary>
/// Test that methods return the expected signature.
/// </summary>
[SkipIfSparkVersionIsLessThan(Versions.V2_4_2)]
[SkipIfSparkVersionIsNotInRange(Versions.V2_4_2, Versions.V3_0_0)]
public void TestSignatures()
{
using var tempDirectory = new TemporaryDirectory();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -12,9 +12,17 @@ public class HyperspaceFixture
{
/// <summary>
/// Creates a <see cref="SparkFixture"/> configured to load the Hyperspace package
/// that matches the Scala binary version of the detected Spark installation.
/// </summary>
/// <exception cref="NotSupportedException">
/// Thrown when the detected Spark major version is neither 2 nor 3.
/// </exception>
public HyperspaceFixture()
{
    // Spark 2.x is built against Scala 2.11 and Spark 3.x against Scala 2.12,
    // so the Hyperspace artifact must be selected per major version.
    Version sparkVersion = SparkSettings.Version;
    string hyperspaceVersion = sparkVersion.Major switch
    {
        2 => "hyperspace-core_2.11:0.2.0",
        3 => "hyperspace-core_2.12:0.2.0",
        _ => throw new NotSupportedException($"Spark {sparkVersion} not supported.")
    };

    // Pass the package to spark-submit via the fixture's extra-args environment variable.
    Environment.SetEnvironmentVariable(
        SparkFixture.EnvironmentVariableNames.ExtraSparkSubmitArgs,
        $"--packages com.microsoft.hyperspace:{hyperspaceVersion}");

    SparkFixture = new SparkFixture();
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,7 @@ public void Dispose()
/// <summary>
/// Test the method signatures for all Hyperspace APIs.
/// </summary>
[SkipIfSparkVersionIsLessThan(Versions.V2_4_0)]
[SkipIfSparkVersionIsNotInRange(Versions.V2_4_0, Versions.V3_0_0)]
public void TestSignatures()
{
// Indexes API.
Expand Down Expand Up @@ -86,7 +86,7 @@ public void TestSignatures()
/// <summary>
/// Test E2E functionality of index CRUD APIs.
/// </summary>
[SkipIfSparkVersionIsLessThan(Versions.V2_4_0)]
[SkipIfSparkVersionIsNotInRange(Versions.V2_4_0, Versions.V3_0_0)]
public void TestIndexCreateAndDelete()
{
// Should be one active index.
Expand Down Expand Up @@ -116,7 +116,7 @@ public void TestIndexCreateAndDelete()
/// <summary>
/// Test that the explain API generates the expected string.
/// </summary>
[SkipIfSparkVersionIsLessThan(Versions.V2_4_0)]
[SkipIfSparkVersionIsNotInRange(Versions.V2_4_0, Versions.V3_0_0)]
public void TestExplainAPI()
{
// Run a query that hits the index.
Expand Down
7 changes: 4 additions & 3 deletions src/csharp/Microsoft.Spark.E2ETest/IpcTests/BroadcastTests.cs
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
using System;
using System.Linq;
using Microsoft.Spark.E2ETest.Utils;
using Microsoft.Spark.Sql;
using Xunit;
using static Microsoft.Spark.Sql.Functions;
Expand Down Expand Up @@ -34,7 +35,7 @@ public BroadcastTests(SparkFixture fixture)
/// <summary>
/// Test Broadcast support by using multiple broadcast variables in a UDF.
/// </summary>
[Fact]
[SkipIfSparkVersionIsGreaterOrEqualTo(Versions.V3_0_0)]
public void TestMultipleBroadcastWithoutEncryption()
{
var obj1 = new TestBroadcastVariable(1, "first");
Expand All @@ -55,7 +56,7 @@ public void TestMultipleBroadcastWithoutEncryption()
/// Test Broadcast.Destroy() that destroys all data and metadata related to the broadcast
/// variable and makes it inaccessible from workers.
/// </summary>
[Fact]
[SkipIfSparkVersionIsGreaterOrEqualTo(Versions.V3_0_0)]
public void TestDestroy()
{
var obj1 = new TestBroadcastVariable(5, "destroy");
Expand Down Expand Up @@ -96,7 +97,7 @@ public void TestDestroy()
/// Test Broadcast.Unpersist() deletes cached copies of the broadcast on the executors. If
/// the broadcast is used after unpersist is called, it is re-sent to the executors.
/// </summary>
[Fact]
[SkipIfSparkVersionIsGreaterOrEqualTo(Versions.V3_0_0)]
public void TestUnpersist()
{
var obj = new TestBroadcastVariable(1, "unpersist");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -156,7 +156,7 @@ public void TestUDF()
}
}

[Fact]
[SkipIfSparkVersionIsGreaterOrEqualTo(Versions.V3_0_0)]
public void TestVectorUdf()
{
Func<Int32Array, StringArray, StringArray> udf1Func =
Expand Down Expand Up @@ -224,7 +224,7 @@ public void TestVectorUdf()
}
}

[Fact]
[SkipIfSparkVersionIsGreaterOrEqualTo(Versions.V3_0_0)]
public void TestDataFrameVectorUdf()
{
Func<Int32DataFrameColumn, ArrowStringDataFrameColumn, ArrowStringDataFrameColumn> udf1Func =
Expand Down Expand Up @@ -290,7 +290,7 @@ public void TestDataFrameVectorUdf()
}
}

[Fact]
[SkipIfSparkVersionIsGreaterOrEqualTo(Versions.V3_0_0)]
public void TestGroupedMapUdf()
{
DataFrame df = _spark
Expand Down Expand Up @@ -369,7 +369,7 @@ private static RecordBatch ArrowBasedCountCharacters(RecordBatch records)
}


[Fact]
[SkipIfSparkVersionIsGreaterOrEqualTo(Versions.V3_0_0)]
public void TestDataFrameGroupedMapUdf()
{
DataFrame df = _spark
Expand Down
8 changes: 7 additions & 1 deletion src/csharp/Microsoft.Spark.E2ETest/SparkSettings.cs
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,13 @@ private static void InitVersion()
// Spark 2.3.2 built for Hadoop 2.7.3
string firstLine =
File.ReadLines($"{SparkHome}{Path.DirectorySeparatorChar}RELEASE").First();
Version = new Version(firstLine.Split(' ')[1]);

// Grab "2.3.2" from "Spark 2.3.2 built for Hadoop 2.7.3"
string versionStr = firstLine.Split(' ')[1];

// Strip any suffix trailing the version number.
// For example, "3.0.0-preview" should become "3.0.0".
Version = new Version(versionStr.Split('-')[0]);
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This is technically not needed, but I didn't remove it because it could be useful when testing preview versions.

}
}
}
27 changes: 26 additions & 1 deletion src/csharp/Microsoft.Spark.E2ETest/Utils/VersionBasedFacts.cs
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,32 @@ public SkipIfSparkVersionIsLessThan(string version)
{
if (SparkSettings.Version < new Version(version))
{
Skip = $"Ignore on Spark version ({SparkSettings.Version}) <= {version}";
Skip = $"Ignore on Spark version ({SparkSettings.Version}) < {version}";
}
}
}

/// <summary>
/// Skips the decorated test when the detected Spark version is greater than
/// or equal to the given version.
/// </summary>
public sealed class SkipIfSparkVersionIsGreaterOrEqualTo : FactAttribute
{
    public SkipIfSparkVersionIsGreaterOrEqualTo(string version)
    {
        // Setting FactAttribute.Skip to a non-null reason causes xUnit to skip the test.
        Version threshold = new Version(version);
        if (SparkSettings.Version >= threshold)
        {
            Skip = $"Ignore on Spark version ({SparkSettings.Version}) >= {version}";
        }
    }
}

/// <summary>
/// Skips the decorated test when the detected Spark version falls outside the
/// half-open range [minInclusive, maxExclusive).
/// </summary>
public sealed class SkipIfSparkVersionIsNotInRange : FactAttribute
{
    public SkipIfSparkVersionIsNotInRange(string minInclusive, string maxExclusive)
    {
        bool belowMin = SparkSettings.Version < new Version(minInclusive);
        bool atOrAboveMax = SparkSettings.Version >= new Version(maxExclusive);

        // A non-null Skip reason tells xUnit to skip this test.
        if (belowMin || atOrAboveMax)
        {
            Skip = $"Ignore on Spark version ({SparkSettings.Version}) not in range of " +
                $"[{minInclusive}, {maxExclusive})";
        }
    }
}
Expand Down
2 changes: 1 addition & 1 deletion src/scala/microsoft-spark-3.0.x/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
<encoding>UTF-8</encoding>
<scala.version>2.12.8</scala.version>
<scala.binary.version>2.12</scala.binary.version>
<spark.version>3.0.0-preview</spark.version>
<spark.version>3.0.0</spark.version>
</properties>

<dependencies>
Expand Down
1 change: 1 addition & 0 deletions src/scala/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
<modules>
<module>microsoft-spark-2.3.x</module>
<module>microsoft-spark-2.4.x</module>
<module>microsoft-spark-3.0.x</module>
</modules>

<pluginRepositories>
Expand Down