- bin/spark-submit --jars external/kinesis-asl/target/scala-*/\
+ bin/spark-submit --jars external/kinesis-asl/target/\
spark-streaming-kinesis-asl-assembly_*.jar \
external/kinesis-asl/src/main/python/examples/streaming/kinesis_wordcount_asl.py \
[Kinesis app name] [Kinesis stream name] [endpoint URL] [region name]
diff --git a/examples/pom.xml b/examples/pom.xml
index 4a20370f0668d..790eb9d659748 100644
--- a/examples/pom.xml
+++ b/examples/pom.xml
@@ -20,7 +20,7 @@
   <modelVersion>4.0.0</modelVersion>
   <parent>
     <groupId>org.apache.spark</groupId>
-    <artifactId>spark-parent_2.11</artifactId>
+    <artifactId>spark-parent</artifactId>
     <version>2.0.0-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>
@@ -280,8 +280,6 @@
   <build>
-    <outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
-    <testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
     <plugins>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
diff --git a/examples/src/main/python/streaming/direct_kafka_wordcount.py b/examples/src/main/python/streaming/direct_kafka_wordcount.py
index 7097f7f4502bd..f0260b6539b39 100644
--- a/examples/src/main/python/streaming/direct_kafka_wordcount.py
+++ b/examples/src/main/python/streaming/direct_kafka_wordcount.py
@@ -24,7 +24,7 @@
and then run the example
`$ bin/spark-submit --jars \
- external/kafka-assembly/target/scala-*/spark-streaming-kafka-assembly-*.jar \
+ external/kafka-assembly/target/spark-streaming-kafka-assembly-*.jar \
examples/src/main/python/streaming/direct_kafka_wordcount.py \
localhost:9092 test`
"""
diff --git a/examples/src/main/python/streaming/flume_wordcount.py b/examples/src/main/python/streaming/flume_wordcount.py
index d75bc6daac138..a49f8909380da 100644
--- a/examples/src/main/python/streaming/flume_wordcount.py
+++ b/examples/src/main/python/streaming/flume_wordcount.py
@@ -24,7 +24,7 @@
and then run the example
`$ bin/spark-submit --jars \
- external/flume-assembly/target/scala-*/spark-streaming-flume-assembly-*.jar \
+ external/flume-assembly/target/spark-streaming-flume-assembly-*.jar \
examples/src/main/python/streaming/flume_wordcount.py \
localhost 12345
"""
diff --git a/examples/src/main/python/streaming/kafka_wordcount.py b/examples/src/main/python/streaming/kafka_wordcount.py
index 8d697f620f467..2eb204643a3c1 100644
--- a/examples/src/main/python/streaming/kafka_wordcount.py
+++ b/examples/src/main/python/streaming/kafka_wordcount.py
@@ -24,7 +24,7 @@
and then run the example
`$ bin/spark-submit --jars \
- external/kafka-assembly/target/scala-*/spark-streaming-kafka-assembly-*.jar \
+ external/kafka-assembly/target/spark-streaming-kafka-assembly-*.jar \
examples/src/main/python/streaming/kafka_wordcount.py \
localhost:2181 test`
"""
diff --git a/examples/src/main/scala/org/apache/spark/examples/mllib/BinaryClassification.scala b/examples/src/main/scala/org/apache/spark/examples/mllib/BinaryClassification.scala
index 2282bd2b7d680..5382317361f50 100644
--- a/examples/src/main/scala/org/apache/spark/examples/mllib/BinaryClassification.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/mllib/BinaryClassification.scala
@@ -89,7 +89,7 @@ object BinaryClassification {
|For example, the following command runs this app on a synthetic dataset:
|
| bin/spark-submit --class org.apache.spark.examples.mllib.BinaryClassification \
- | examples/target/scala-*/spark-examples-*.jar \
+ | examples/target/spark-examples-*.jar \
| --algorithm LR --regType L2 --regParam 1.0 \
| data/mllib/sample_binary_classification_data.txt
""".stripMargin)
diff --git a/examples/src/main/scala/org/apache/spark/examples/mllib/Correlations.scala b/examples/src/main/scala/org/apache/spark/examples/mllib/Correlations.scala
index e003f35ed399f..c170296345bfb 100644
--- a/examples/src/main/scala/org/apache/spark/examples/mllib/Correlations.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/mllib/Correlations.scala
@@ -51,7 +51,7 @@ object Correlations {
|For example, the following command runs this app on a synthetic dataset:
|
| bin/spark-submit --class org.apache.spark.examples.mllib.Correlations \
- | examples/target/scala-*/spark-examples-*.jar \
+ | examples/target/spark-examples-*.jar \
| --input data/mllib/sample_linear_regression_data.txt
""".stripMargin)
}
diff --git a/examples/src/main/scala/org/apache/spark/examples/mllib/LinearRegression.scala b/examples/src/main/scala/org/apache/spark/examples/mllib/LinearRegression.scala
index f87611f5d4613..71ef255d98312 100644
--- a/examples/src/main/scala/org/apache/spark/examples/mllib/LinearRegression.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/mllib/LinearRegression.scala
@@ -76,7 +76,7 @@ object LinearRegression {
|For example, the following command runs this app on a synthetic dataset:
|
| bin/spark-submit --class org.apache.spark.examples.mllib.LinearRegression \
- | examples/target/scala-*/spark-examples-*.jar \
+ | examples/target/spark-examples-*.jar \
| data/mllib/sample_linear_regression_data.txt
""".stripMargin)
}
diff --git a/examples/src/main/scala/org/apache/spark/examples/mllib/MovieLensALS.scala b/examples/src/main/scala/org/apache/spark/examples/mllib/MovieLensALS.scala
index 09750e53cb169..2ce3dec6cbb94 100644
--- a/examples/src/main/scala/org/apache/spark/examples/mllib/MovieLensALS.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/mllib/MovieLensALS.scala
@@ -83,7 +83,7 @@ object MovieLensALS {
|For example, the following command runs this app on a synthetic dataset:
|
| bin/spark-submit --class org.apache.spark.examples.mllib.MovieLensALS \
- | examples/target/scala-*/spark-examples-*.jar \
+ | examples/target/spark-examples-*.jar \
| --rank 5 --numIterations 20 --lambda 1.0 --kryo \
| data/mllib/sample_movielens_data.txt
""".stripMargin)
diff --git a/examples/src/main/scala/org/apache/spark/examples/mllib/MultivariateSummarizer.scala b/examples/src/main/scala/org/apache/spark/examples/mllib/MultivariateSummarizer.scala
index 3c598172dadf0..950ddb00d67e4 100644
--- a/examples/src/main/scala/org/apache/spark/examples/mllib/MultivariateSummarizer.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/mllib/MultivariateSummarizer.scala
@@ -52,7 +52,7 @@ object MultivariateSummarizer {
|For example, the following command runs this app on a synthetic dataset:
|
| bin/spark-submit --class org.apache.spark.examples.mllib.MultivariateSummarizer \
- | examples/target/scala-*/spark-examples-*.jar \
+ | examples/target/spark-examples-*.jar \
| --input data/mllib/sample_linear_regression_data.txt
""".stripMargin)
}
diff --git a/examples/src/main/scala/org/apache/spark/examples/mllib/SampledRDDs.scala b/examples/src/main/scala/org/apache/spark/examples/mllib/SampledRDDs.scala
index 0da4005977d1a..f9ecc9e4dac97 100644
--- a/examples/src/main/scala/org/apache/spark/examples/mllib/SampledRDDs.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/mllib/SampledRDDs.scala
@@ -48,7 +48,7 @@ object SampledRDDs {
|For example, the following command runs this app:
|
| bin/spark-submit --class org.apache.spark.examples.mllib.SampledRDDs \
- | examples/target/scala-*/spark-examples-*.jar
+ | examples/target/spark-examples-*.jar
""".stripMargin)
}
diff --git a/external/docker-integration-tests/pom.xml b/external/docker-integration-tests/pom.xml
index 17fd7d781c9ab..040aec1f82f98 100644
--- a/external/docker-integration-tests/pom.xml
+++ b/external/docker-integration-tests/pom.xml
@@ -21,7 +21,7 @@
   <modelVersion>4.0.0</modelVersion>
   <parent>
     <groupId>org.apache.spark</groupId>
-    <artifactId>spark-parent_2.11</artifactId>
+    <artifactId>spark-parent</artifactId>
    <version>2.0.0-SNAPSHOT</version>
    <relativePath>../../pom.xml</relativePath>
  </parent>
diff --git a/external/flume-assembly/pom.xml b/external/flume-assembly/pom.xml
index ac15b93c048da..2911019d5a5ef 100644
--- a/external/flume-assembly/pom.xml
+++ b/external/flume-assembly/pom.xml
@@ -20,7 +20,7 @@
   <modelVersion>4.0.0</modelVersion>
   <parent>
     <groupId>org.apache.spark</groupId>
-    <artifactId>spark-parent_2.11</artifactId>
+    <artifactId>spark-parent</artifactId>
     <version>2.0.0-SNAPSHOT</version>
     <relativePath>../../pom.xml</relativePath>
   </parent>
@@ -107,8 +107,6 @@
   <build>
-    <outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
-    <testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
     <plugins>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
diff --git a/external/flume-sink/pom.xml b/external/flume-sink/pom.xml
index e4effe158c826..a828d4381e797 100644
--- a/external/flume-sink/pom.xml
+++ b/external/flume-sink/pom.xml
@@ -20,7 +20,7 @@
   <modelVersion>4.0.0</modelVersion>
   <parent>
     <groupId>org.apache.spark</groupId>
-    <artifactId>spark-parent_2.11</artifactId>
+    <artifactId>spark-parent</artifactId>
     <version>2.0.0-SNAPSHOT</version>
     <relativePath>../../pom.xml</relativePath>
   </parent>
@@ -96,8 +96,6 @@
   <build>
-    <outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
-    <testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
     <plugins>
       <plugin>
         <groupId>org.apache.avro</groupId>
@@ -105,7 +103,7 @@
         <artifactId>avro-maven-plugin</artifactId>
         <version>${avro.version}</version>
         <configuration>
-          <outputDirectory>${project.basedir}/target/scala-${scala.binary.version}/src_managed/main/compiled_avro</outputDirectory>
+          <outputDirectory>${project.basedir}/target/src_managed/main/compiled_avro</outputDirectory>
diff --git a/external/flume/pom.xml b/external/flume/pom.xml
index d650dd034d636..a1741439d7423 100644
--- a/external/flume/pom.xml
+++ b/external/flume/pom.xml
@@ -20,7 +20,7 @@
   <modelVersion>4.0.0</modelVersion>
   <parent>
     <groupId>org.apache.spark</groupId>
-    <artifactId>spark-parent_2.11</artifactId>
+    <artifactId>spark-parent</artifactId>
     <version>2.0.0-SNAPSHOT</version>
     <relativePath>../../pom.xml</relativePath>
   </parent>
@@ -71,8 +71,4 @@
       <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
     </dependency>
   </dependencies>
-  <build>
-    <outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
-    <testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
-  </build>
 </project>
diff --git a/external/java8-tests/pom.xml b/external/java8-tests/pom.xml
index 1ea9196e9dfe3..d9e3578c07435 100644
--- a/external/java8-tests/pom.xml
+++ b/external/java8-tests/pom.xml
@@ -19,7 +19,7 @@
   <modelVersion>4.0.0</modelVersion>
   <parent>
     <groupId>org.apache.spark</groupId>
-    <artifactId>spark-parent_2.11</artifactId>
+    <artifactId>spark-parent</artifactId>
     <version>2.0.0-SNAPSHOT</version>
     <relativePath>../../pom.xml</relativePath>
   </parent>
diff --git a/external/kafka-assembly/pom.xml b/external/kafka-assembly/pom.xml
index 62818f5e8f434..634458834f37d 100644
--- a/external/kafka-assembly/pom.xml
+++ b/external/kafka-assembly/pom.xml
@@ -20,7 +20,7 @@
   <modelVersion>4.0.0</modelVersion>
   <parent>
     <groupId>org.apache.spark</groupId>
-    <artifactId>spark-parent_2.11</artifactId>
+    <artifactId>spark-parent</artifactId>
     <version>2.0.0-SNAPSHOT</version>
     <relativePath>../../pom.xml</relativePath>
   </parent>
@@ -134,8 +134,6 @@
   <build>
-    <outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
-    <testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
     <plugins>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
diff --git a/external/kafka/pom.xml b/external/kafka/pom.xml
index 68d52e9339b3d..08f30b783485b 100644
--- a/external/kafka/pom.xml
+++ b/external/kafka/pom.xml
@@ -20,7 +20,7 @@
   <modelVersion>4.0.0</modelVersion>
   <parent>
     <groupId>org.apache.spark</groupId>
-    <artifactId>spark-parent_2.11</artifactId>
+    <artifactId>spark-parent</artifactId>
     <version>2.0.0-SNAPSHOT</version>
     <relativePath>../../pom.xml</relativePath>
   </parent>
@@ -91,8 +91,4 @@
       <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
     </dependency>
   </dependencies>
-  <build>
-    <outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
-    <testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
-  </build>
 </project>
diff --git a/external/kinesis-asl-assembly/pom.xml b/external/kinesis-asl-assembly/pom.xml
index d1c38c7ca5d69..8ec6071c07a8f 100644
--- a/external/kinesis-asl-assembly/pom.xml
+++ b/external/kinesis-asl-assembly/pom.xml
@@ -20,7 +20,7 @@
   <modelVersion>4.0.0</modelVersion>
   <parent>
     <groupId>org.apache.spark</groupId>
-    <artifactId>spark-parent_2.11</artifactId>
+    <artifactId>spark-parent</artifactId>
     <version>2.0.0-SNAPSHOT</version>
     <relativePath>../../pom.xml</relativePath>
   </parent>
@@ -129,8 +129,6 @@
   <build>
-    <outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
-    <testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
     <plugins>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
diff --git a/external/kinesis-asl/pom.xml b/external/kinesis-asl/pom.xml
index 935155eb5d362..94b572c096de8 100644
--- a/external/kinesis-asl/pom.xml
+++ b/external/kinesis-asl/pom.xml
@@ -19,7 +19,7 @@
   <modelVersion>4.0.0</modelVersion>
   <parent>
     <groupId>org.apache.spark</groupId>
-    <artifactId>spark-parent_2.11</artifactId>
+    <artifactId>spark-parent</artifactId>
     <version>2.0.0-SNAPSHOT</version>
     <relativePath>../../pom.xml</relativePath>
   </parent>
@@ -80,8 +80,4 @@
       <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
     </dependency>
   </dependencies>
-  <build>
-    <outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
-    <testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
-  </build>
 </project>
diff --git a/external/kinesis-asl/src/main/python/examples/streaming/kinesis_wordcount_asl.py b/external/kinesis-asl/src/main/python/examples/streaming/kinesis_wordcount_asl.py
index 4d7fc9a549bfb..b5675d3790a4d 100644
--- a/external/kinesis-asl/src/main/python/examples/streaming/kinesis_wordcount_asl.py
+++ b/external/kinesis-asl/src/main/python/examples/streaming/kinesis_wordcount_asl.py
@@ -34,7 +34,7 @@
     $ export AWS_SECRET_KEY=<your-secret-key>

     # run the example
-    $ bin/spark-submit --jars external/kinesis-asl/target/scala-*/\
+    $ bin/spark-submit --jars external/kinesis-asl/target/\
         spark-streaming-kinesis-asl-assembly_*.jar \
         external/kinesis-asl/src/main/python/examples/streaming/kinesis_wordcount_asl.py \
         myAppName mySparkStream https://kinesis.us-east-1.amazonaws.com
diff --git a/external/spark-ganglia-lgpl/pom.xml b/external/spark-ganglia-lgpl/pom.xml
index bfb92791de3d8..baa8c0a4414a0 100644
--- a/external/spark-ganglia-lgpl/pom.xml
+++ b/external/spark-ganglia-lgpl/pom.xml
@@ -19,7 +19,7 @@
   <modelVersion>4.0.0</modelVersion>
   <parent>
     <groupId>org.apache.spark</groupId>
-    <artifactId>spark-parent_2.11</artifactId>
+    <artifactId>spark-parent</artifactId>
     <version>2.0.0-SNAPSHOT</version>
     <relativePath>../../pom.xml</relativePath>
   </parent>
diff --git a/graphx/pom.xml b/graphx/pom.xml
index 1813f383cdcba..dfcf3e1615ffa 100644
--- a/graphx/pom.xml
+++ b/graphx/pom.xml
@@ -20,7 +20,7 @@
   <modelVersion>4.0.0</modelVersion>
   <parent>
     <groupId>org.apache.spark</groupId>
-    <artifactId>spark-parent_2.11</artifactId>
+    <artifactId>spark-parent</artifactId>
     <version>2.0.0-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>
@@ -75,8 +75,4 @@
       <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
     </dependency>
   </dependencies>
-  <build>
-    <outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
-    <testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
-  </build>
 </project>
diff --git a/launcher/pom.xml b/launcher/pom.xml
index ef731948826ef..f46bc186cacfa 100644
--- a/launcher/pom.xml
+++ b/launcher/pom.xml
@@ -21,13 +21,13 @@
   <modelVersion>4.0.0</modelVersion>
   <parent>
     <groupId>org.apache.spark</groupId>
-    <artifactId>spark-parent_2.11</artifactId>
+    <artifactId>spark-parent</artifactId>
     <version>2.0.0-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>

   <groupId>org.apache.spark</groupId>
-  <artifactId>spark-launcher_2.11</artifactId>
+  <artifactId>spark-launcher</artifactId>
   <packaging>jar</packaging>
   <name>Spark Project Launcher</name>
   <url>http://spark.apache.org/</url>
@@ -63,11 +63,6 @@
       <scope>test</scope>
     </dependency>
-    <dependency>
-      <groupId>org.apache.spark</groupId>
-      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
-    </dependency>
-
     <dependency>
       <groupId>org.apache.hadoop</groupId>
@@ -75,9 +70,4 @@
       <scope>test</scope>
     </dependency>
   </dependencies>
-
-  <build>
-    <outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
-    <testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
-  </build>
 </project>
diff --git a/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java b/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java
index c7488082ca899..11efef75d04c2 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java
@@ -144,7 +144,6 @@ List<String> buildClassPath(String appClassPath) throws IOException {
boolean prependClasses = !isEmpty(getenv("SPARK_PREPEND_CLASSES"));
boolean isTesting = "1".equals(getenv("SPARK_TESTING"));
if (prependClasses || isTesting) {
- String scala = getScalaVersion();
      List<String> projects = Arrays.asList(
"common/network-common",
"common/network-shuffle",
@@ -172,14 +171,12 @@ List<String> buildClassPath(String appClassPath) throws IOException {
"assembly.");
}
for (String project : projects) {
- addToClassPath(cp, String.format("%s/%s/target/scala-%s/classes", sparkHome, project,
- scala));
+ addToClassPath(cp, String.format("%s/%s/target/classes", sparkHome, project));
}
}
if (isTesting) {
for (String project : projects) {
- addToClassPath(cp, String.format("%s/%s/target/scala-%s/test-classes", sparkHome,
- project, scala));
+ addToClassPath(cp, String.format("%s/%s/target/test-classes", sparkHome, project));
}
}
@@ -192,7 +189,7 @@ List<String> buildClassPath(String appClassPath) throws IOException {
// propagate the test classpath appropriately. For normal invocation, look for the jars
// directory under SPARK_HOME.
boolean isTestingSql = "1".equals(getenv("SPARK_SQL_TESTING"));
- String jarsDir = findJarsDir(getSparkHome(), getScalaVersion(), !isTesting && !isTestingSql);
+ String jarsDir = findJarsDir(getSparkHome(), !isTesting && !isTestingSql);
if (jarsDir != null) {
addToClassPath(cp, join(File.separator, jarsDir, "*"));
}
@@ -224,25 +221,6 @@ private void addToClassPath(List<String> cp, String entries) {
}
}
- String getScalaVersion() {
- String scala = getenv("SPARK_SCALA_VERSION");
- if (scala != null) {
- return scala;
- }
- String sparkHome = getSparkHome();
- File scala210 = new File(sparkHome, "launcher/target/scala-2.10");
- File scala211 = new File(sparkHome, "launcher/target/scala-2.11");
- checkState(!scala210.isDirectory() || !scala211.isDirectory(),
- "Presence of build for both scala versions (2.10 and 2.11) detected.\n" +
- "Either clean one of them or set SPARK_SCALA_VERSION in your environment.");
- if (scala210.isDirectory()) {
- return "2.10";
- } else {
- checkState(scala211.isDirectory(), "Cannot find any build directories.");
- return "2.11";
- }
- }
-
String getSparkHome() {
String path = getenv(ENV_SPARK_HOME);
checkState(path != null,
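
With the Scala version gone from build directory names, the launcher no longer needs to detect one, which is why `getScalaVersion()` and the `SPARK_SCALA_VERSION` probing above can simply be deleted. A minimal sketch of the path construction before and after this change (illustrative Scala; the real logic is the Java above, and these names are not the launcher's API):

```scala
// Illustrative sketch only; the real logic lives in AbstractCommandBuilder.
object ClassPathSketch {
  // Before: the Scala binary version was a path component, so the launcher
  // first had to detect it (env var, or probing launcher/target/scala-2.1x).
  def classesDirBefore(sparkHome: String, project: String, scalaVersion: String): String =
    s"$sparkHome/$project/target/scala-$scalaVersion/classes"

  // After: a single fixed layout, no detection step required.
  def classesDirAfter(sparkHome: String, project: String): String =
    s"$sparkHome/$project/target/classes"

  def main(args: Array[String]): Unit = {
    println(classesDirBefore("/opt/spark", "core", "2.11")) // /opt/spark/core/target/scala-2.11/classes
    println(classesDirAfter("/opt/spark", "core"))          // /opt/spark/core/target/classes
  }
}
```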
diff --git a/launcher/src/main/java/org/apache/spark/launcher/CommandBuilderUtils.java b/launcher/src/main/java/org/apache/spark/launcher/CommandBuilderUtils.java
index 91586aad7b709..7c8ebd98ff96a 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/CommandBuilderUtils.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/CommandBuilderUtils.java
@@ -354,7 +354,7 @@ static int javaMajorVersion(String javaVersion) {
  * Find the location of the Spark jars dir, depending on whether we're looking at a build
  * or a distribution directory.
  */
- static String findJarsDir(String sparkHome, String scalaVersion, boolean failIfNotFound) {
+ static String findJarsDir(String sparkHome, boolean failIfNotFound) {
// TODO: change to the correct directory once the assembly build is changed.
File libdir;
if (new File(sparkHome, "RELEASE").isFile()) {
@@ -363,7 +363,7 @@ static String findJarsDir(String sparkHome, String scalaVersion, boolean failIfNotFound) {
"Library directory '%s' does not exist.",
libdir.getAbsolutePath());
} else {
- libdir = new File(sparkHome, String.format("assembly/target/scala-%s/jars", scalaVersion));
+ libdir = new File(sparkHome, "assembly/target/jars");
if (!libdir.isDirectory()) {
checkState(!failIfNotFound,
"Library directory '%s' does not exist; make sure Spark is built.",
diff --git a/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java b/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java
index 6941ca903cd0a..add5ec37e9667 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java
@@ -355,8 +355,7 @@ private List<String> findExamplesJars() {
if (new File(sparkHome, "RELEASE").isFile()) {
jarsDir = new File(sparkHome, "examples/jars");
} else {
- jarsDir = new File(sparkHome,
- String.format("examples/target/scala-%s/jars", getScalaVersion()));
+      jarsDir = new File(sparkHome, "examples/target/jars");
}
boolean foundDir = jarsDir.isDirectory();
diff --git a/mllib-local/pom.xml b/mllib-local/pom.xml
index c56561f215926..cc65b7834781d 100644
--- a/mllib-local/pom.xml
+++ b/mllib-local/pom.xml
@@ -20,7 +20,7 @@
   <modelVersion>4.0.0</modelVersion>
   <parent>
     <groupId>org.apache.spark</groupId>
-    <artifactId>spark-parent_2.11</artifactId>
+    <artifactId>spark-parent</artifactId>
     <version>2.0.0-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>
@@ -80,8 +80,4 @@
     </dependency>
   </dependencies>
-  <build>
-    <outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
-    <testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
-  </build>
 </project>
diff --git a/mllib/pom.xml b/mllib/pom.xml
index e56eafc3006bd..580177704f90e 100644
--- a/mllib/pom.xml
+++ b/mllib/pom.xml
@@ -20,7 +20,7 @@
   <modelVersion>4.0.0</modelVersion>
   <parent>
     <groupId>org.apache.spark</groupId>
-    <artifactId>spark-parent_2.11</artifactId>
+    <artifactId>spark-parent</artifactId>
     <version>2.0.0-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>
@@ -145,8 +145,4 @@
     </dependency>
   </dependencies>
-  <build>
-    <outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
-    <testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
-  </build>
 </project>
diff --git a/pom.xml b/pom.xml
index 4585c8b9c2b0b..2df2423d0df39 100644
--- a/pom.xml
+++ b/pom.xml
@@ -25,7 +25,7 @@
     <version>14</version>
   </parent>
   <groupId>org.apache.spark</groupId>
-  <artifactId>spark-parent_2.11</artifactId>
+  <artifactId>spark-parent</artifactId>
   <version>2.0.0-SNAPSHOT</version>
   <packaging>pom</packaging>
   <name>Spark Project Parent POM</name>
@@ -141,6 +141,8 @@
     <jetty.version>8.1.14.v20131031</jetty.version>
     <orbit.version>3.0.0.v201112011016</orbit.version>
     <chill.version>0.8.0</chill.version>
+
+    <kryo.version>3.0.3</kryo.version>
     <ivy.version>2.4.0</ivy.version>
     <oro.version>2.0.8</oro.version>
     <codahale.metrics.version>3.1.2</codahale.metrics.version>
@@ -158,8 +160,6 @@
     <commons.math3.version>3.4.1</commons.math3.version>
     <commons.collections.version>3.2.2</commons.collections.version>
-    <scala.version>2.11.8</scala.version>
-    <scala.binary.version>2.11</scala.binary.version>
     <jline.version>${scala.version}</jline.version>
     <jline.groupid>org.scala-lang</jline.groupid>
     <codehaus.jackson.version>1.9.13</codehaus.jackson.version>
@@ -188,7 +188,7 @@
     <spark.shade.packageName>org.spark_project</spark.shade.packageName>

     <!-- Modules that copy jars to the build directory should do so under this location. -->
-    <jars.target.dir>${project.build.directory}/scala-${scala.binary.version}/jars</jars.target.dir>
+    <jars.target.dir>${project.build.directory}/jars</jars.target.dir>

     <build.testJarPhase>prepare-package</build.testJarPhase>
@@ -274,6 +274,11 @@
         <version>${project.version}</version>
         <scope>test</scope>
       </dependency>
+      <dependency>
+        <groupId>com.esotericsoftware</groupId>
+        <artifactId>kryo-shaded</artifactId>
+        <version>${kryo.version}</version>
+      </dependency>
       <dependency>
         <groupId>com.twitter</groupId>
         <artifactId>chill_${scala.binary.version}</artifactId>
@@ -1908,7 +1913,6 @@
             -->
             <SPARK_DIST_CLASSPATH>${test_classpath}</SPARK_DIST_CLASSPATH>
             <SPARK_PREPEND_CLASSES>1</SPARK_PREPEND_CLASSES>
-            <SPARK_SCALA_VERSION>${scala.binary.version}</SPARK_SCALA_VERSION>
             <SPARK_TESTING>1</SPARK_TESTING>
             <JAVA_HOME>${test.java.home}</JAVA_HOME>
@@ -1957,7 +1961,6 @@
             -->
             <SPARK_DIST_CLASSPATH>${test_classpath}</SPARK_DIST_CLASSPATH>
             <SPARK_PREPEND_CLASSES>1</SPARK_PREPEND_CLASSES>
-            <SPARK_SCALA_VERSION>${scala.binary.version}</SPARK_SCALA_VERSION>
             <SPARK_TESTING>1</SPARK_TESTING>
             <JAVA_HOME>${test.java.home}</JAVA_HOME>
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index a58dd7e7f125c..b36660db81d78 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -172,6 +172,15 @@ object SparkBuild extends PomBuild {
publishLocal in MavenCompile <<= publishTask(publishLocalConfiguration in MavenCompile, deliverLocal),
publishLocalBoth <<= Seq(publishLocal in MavenCompile, publishLocal).dependOn,
+ // Don't append Scala versions to the generated artifacts. We handle this ourselves by doing
+ // this in the POM. In order for our POM names to work properly, we need to disable SBT's
+ // normalization so that spark-foo_2.xx isn't escaped to spark-foo_2-xx.
+ crossPaths := false,
+ normalizedName := name.value,
+ // Don't automatically include Scala library dependency. We manage our dependencies via Maven
+ // POMs and thus do not want SBT to implicitly add additional dependencies.
+ autoScalaLibrary := false,
+
javacOptions in (Compile, doc) ++= {
val versionParts = System.getProperty("java.version").split("[+.\\-]+", 3)
var major = versionParts(0).toInt
@@ -708,7 +717,6 @@ object TestSettings {
"SPARK_DIST_CLASSPATH" ->
(fullClasspath in Test).value.files.map(_.getAbsolutePath).mkString(":").stripSuffix(":"),
"SPARK_PREPEND_CLASSES" -> "1",
- "SPARK_SCALA_VERSION" -> scalaBinaryVersion,
"SPARK_TESTING" -> "1",
"JAVA_HOME" -> sys.env.get("JAVA_HOME").getOrElse(sys.props("java.home"))),
javaOptions in Test += s"-Djava.io.tmpdir=$testTempDir",
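
The comments added above explain the intent; for reference, this is what those two settings do in any sbt build. A minimal build.sbt sketch (illustrative project, not Spark's build):

```scala
// build.sbt sketch. With sbt defaults this project would produce
// target/scala-2.11/demo_2.11-0.1.jar; with the two settings below it
// produces target/demo-0.1.jar and adds no implicit scala-library dependency.
name := "demo"
version := "0.1"
crossPaths := false       // drop the scala-2.xx path segment and the _2.xx suffix
autoScalaLibrary := false // do not auto-add org.scala-lang:scala-library
```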
diff --git a/project/project/SparkPluginBuild.scala b/project/project/SparkPluginBuild.scala
index cbb88dc7dd1dd..3ac438a7c6f4c 100644
--- a/project/project/SparkPluginBuild.scala
+++ b/project/project/SparkPluginBuild.scala
@@ -24,5 +24,9 @@ import sbt.Keys._
*/
object SparkPluginDef extends Build {
lazy val root = Project("plugins", file(".")) dependsOn(sbtPomReader)
-  lazy val sbtPomReader = uri("https://github.com/ScrapCodes/sbt-pom-reader.git#ignore_artifact_id")
+  // This corresponds to https://github.com/ScrapCodes/sbt-pom-reader/commits/test-scoped-deps.
+  // This branch contains https://github.com/sbt/sbt-pom-reader/pull/14, a patch to fix test-jar dependencies
+  // which has not yet been merged upstream. Once that's merged, we should remove our custom fork and use
+  // the official sbt-pom-reader release instead; see SPARK-14401.
+  lazy val sbtPomReader = uri("https://github.com/ScrapCodes/sbt-pom-reader.git#dee910271ccde761837c19ad1d7d450b1dc63a6d")
}
}
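
Pinning the fork to a commit hash rather than a branch name keeps plugin resolution reproducible while the upstream pull request is pending. The sbt idiom, for reference (the branch-name form is what the old line used; both URIs are from the diff above):

```scala
import sbt._

// A build can reference an sbt plugin project straight from git; the URI
// fragment selects either a branch (moves over time) or a commit (pinned).
object GitRefSketch {
  val byBranch = uri("https://github.com/ScrapCodes/sbt-pom-reader.git#test-scoped-deps")
  val byCommit = uri("https://github.com/ScrapCodes/sbt-pom-reader.git#dee910271ccde761837c19ad1d7d450b1dc63a6d")
}
```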
diff --git a/python/pyspark/streaming/tests.py b/python/pyspark/streaming/tests.py
index 148bf7e8ff5ce..60fff6c6cc26c 100644
--- a/python/pyspark/streaming/tests.py
+++ b/python/pyspark/streaming/tests.py
@@ -1469,8 +1469,7 @@ def get_output(_, rdd):
def search_jar(dir, name_prefix):
# We should ignore the following jars
ignored_jar_suffixes = ("javadoc.jar", "sources.jar", "test-sources.jar", "tests.jar")
- jars = (glob.glob(os.path.join(dir, "target/scala-*/" + name_prefix + "-*.jar")) + # sbt build
- glob.glob(os.path.join(dir, "target/" + name_prefix + "_*.jar"))) # maven build
+ jars = glob.glob(os.path.join(dir, "target/" + name_prefix + "_*.jar"))
return [jar for jar in jars if not jar.endswith(ignored_jar_suffixes)]
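
With crossPaths disabled, sbt and maven now both drop jars directly under target/, so the single maven-style glob covers both builds. A rough Scala analogue of the simplified search_jar, for illustration only:

```scala
import java.io.File

// Rough analogue of search_jar above: list target/<prefix>_*.jar and drop
// javadoc/sources/test artifacts. Illustrative, not part of the patch.
object SearchJarSketch {
  private val ignoredSuffixes =
    Seq("javadoc.jar", "sources.jar", "test-sources.jar", "tests.jar")

  def searchJar(dir: File, namePrefix: String): Seq[File] =
    Option(new File(dir, "target").listFiles()).toSeq.flatten
      .filter(f => f.getName.startsWith(namePrefix + "_") && f.getName.endsWith(".jar"))
      .filterNot(f => ignoredSuffixes.exists(f.getName.endsWith))
}
```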
diff --git a/python/run-tests.py b/python/run-tests.py
index 38b3bb84c10be..fb08a15fe8704 100755
--- a/python/run-tests.py
+++ b/python/run-tests.py
@@ -54,11 +54,9 @@ def print_red(text):
LOGGER = logging.getLogger()
# Find out where the assembly jars are located.
-for scala in ["2.11", "2.10"]:
- build_dir = os.path.join(SPARK_HOME, "assembly", "target", "scala-" + scala)
- if os.path.isdir(build_dir):
- SPARK_DIST_CLASSPATH = os.path.join(build_dir, "jars", "*")
- break
+build_dir = os.path.join(SPARK_HOME, "assembly", "target")
+if os.path.isdir(build_dir):
+ SPARK_DIST_CLASSPATH = os.path.join(build_dir, "jars", "*")
else:
raise Exception("Cannot find assembly build directory, please build Spark first.")
diff --git a/repl/pom.xml b/repl/pom.xml
index 0f396c9b809bd..0ceacfa86a9a3 100644
--- a/repl/pom.xml
+++ b/repl/pom.xml
@@ -20,7 +20,7 @@
   <modelVersion>4.0.0</modelVersion>
   <parent>
     <groupId>org.apache.spark</groupId>
-    <artifactId>spark-parent_2.11</artifactId>
+    <artifactId>spark-parent</artifactId>
     <version>2.0.0-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>
@@ -119,8 +119,6 @@
   <build>
-    <outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
-    <testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
     <plugins>
diff --git a/sql/catalyst/pom.xml b/sql/catalyst/pom.xml
index 1748fa2778d6a..6cd9b7d0d0362 100644
--- a/sql/catalyst/pom.xml
+++ b/sql/catalyst/pom.xml
@@ -21,7 +21,7 @@
   <modelVersion>4.0.0</modelVersion>
   <parent>
     <groupId>org.apache.spark</groupId>
-    <artifactId>spark-parent_2.11</artifactId>
+    <artifactId>spark-parent</artifactId>
     <version>2.0.0-SNAPSHOT</version>
     <relativePath>../../pom.xml</relativePath>
   </parent>
@@ -59,7 +59,7 @@
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-unsafe_${scala.binary.version}</artifactId>
+      <artifactId>spark-unsafe</artifactId>
       <version>${project.version}</version>
     </dependency>
@@ -81,8 +81,6 @@
   <build>
-    <outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
-    <testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
     <plugins>