diff --git a/assembly/pom.xml b/assembly/pom.xml
index 78fb908f9a9ef..b2a9d0780ee2b 100644
--- a/assembly/pom.xml
+++ b/assembly/pom.xml
@@ -354,5 +354,25 @@
+
+    <profile>
+      <id>hadoop-provided</id>
+      <properties>
+        <hadoop.deps.scope>provided</hadoop.deps.scope>
+      </properties>
+    </profile>
+    <profile>
+      <id>hive-provided</id>
+      <properties>
+        <hive.deps.scope>provided</hive.deps.scope>
+      </properties>
+    </profile>
+    <profile>
+      <id>parquet-provided</id>
+      <properties>
+        <parquet.deps.scope>provided</parquet.deps.scope>
+      </properties>
+    </profile>
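
Note: the three profiles added above only flip the corresponding `*.deps.scope` properties from `compile` to `provided`, so the assembly stops bundling those stacks. For example, building with something like `mvn -Phadoop-provided -DskipTests package` (command line illustrative) would be expected to produce an assembly jar without Hadoop classes in it.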
diff --git a/bagel/pom.xml b/bagel/pom.xml
index 3bcd38fa3245c..510e92640eff8 100644
--- a/bagel/pom.xml
+++ b/bagel/pom.xml
@@ -40,10 +40,6 @@
spark-core_${scala.binary.version}
${project.version}
-
- org.eclipse.jetty
- jetty-server
-
org.scalacheck
scalacheck_${scala.binary.version}
diff --git a/bin/compute-classpath.cmd b/bin/compute-classpath.cmd
index a4c099fb45b14..088f993954d9e 100644
--- a/bin/compute-classpath.cmd
+++ b/bin/compute-classpath.cmd
@@ -109,6 +109,13 @@ if "x%YARN_CONF_DIR%"=="x" goto no_yarn_conf_dir
set CLASSPATH=%CLASSPATH%;%YARN_CONF_DIR%
:no_yarn_conf_dir
+rem To allow for distributions to append needed libraries to the classpath (e.g. when
+rem using the "hadoop-provided" profile to build Spark), check SPARK_DIST_CLASSPATH and
+rem append it to the final classpath.
+if not "x%SPARK_DIST_CLASSPATH%"=="x" (
+ set CLASSPATH=%CLASSPATH%;%SPARK_DIST_CLASSPATH%
+)
+
rem A bit of a hack to allow calling this script within run2.cmd without seeing output
if "%DONT_PRINT_CLASSPATH%"=="1" goto exit
diff --git a/bin/compute-classpath.sh b/bin/compute-classpath.sh
index a31ea73d3ce19..8f3b396ffd086 100755
--- a/bin/compute-classpath.sh
+++ b/bin/compute-classpath.sh
@@ -146,4 +146,11 @@ if [ -n "$YARN_CONF_DIR" ]; then
CLASSPATH="$CLASSPATH:$YARN_CONF_DIR"
fi
+# To allow for distributions to append needed libraries to the classpath (e.g. when
+# using the "hadoop-provided" profile to build Spark), check SPARK_DIST_CLASSPATH and
+# append it to the final classpath.
+if [ -n "$SPARK_DIST_CLASSPATH" ]; then
+ CLASSPATH="$CLASSPATH:$SPARK_DIST_CLASSPATH"
+fi
+
echo "$CLASSPATH"
diff --git a/core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala
index 8c7de75600b5f..7eb87a564d6f5 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala
@@ -55,19 +55,26 @@ private[spark] class SparkDeploySchedulerBackend(
"{{WORKER_URL}}")
val extraJavaOpts = sc.conf.getOption("spark.executor.extraJavaOptions")
.map(Utils.splitCommandString).getOrElse(Seq.empty)
- val classPathEntries = sc.conf.getOption("spark.executor.extraClassPath").toSeq.flatMap { cp =>
- cp.split(java.io.File.pathSeparator)
- }
- val libraryPathEntries =
- sc.conf.getOption("spark.executor.extraLibraryPath").toSeq.flatMap { cp =>
- cp.split(java.io.File.pathSeparator)
+ val classPathEntries = sc.conf.getOption("spark.executor.extraClassPath")
+ .map(_.split(java.io.File.pathSeparator).toSeq).getOrElse(Nil)
+ val libraryPathEntries = sc.conf.getOption("spark.executor.extraLibraryPath")
+ .map(_.split(java.io.File.pathSeparator).toSeq).getOrElse(Nil)
+
+ // When testing, expose the parent class path to the child. This is processed by
+ // compute-classpath.{cmd,sh} and makes all needed jars available to child processes
+ // when the assembly is built with the "*-provided" profiles enabled.
+ val testingClassPath =
+ if (sys.props.contains("spark.testing")) {
+ sys.props("java.class.path").split(java.io.File.pathSeparator).toSeq
+ } else {
+ Nil
}
// Start executors with a few necessary configs for registering with the scheduler
val sparkJavaOpts = Utils.sparkJavaOpts(conf, SparkConf.isExecutorStartupConf)
val javaOpts = sparkJavaOpts ++ extraJavaOpts
val command = Command("org.apache.spark.executor.CoarseGrainedExecutorBackend",
- args, sc.executorEnvs, classPathEntries, libraryPathEntries, javaOpts)
+ args, sc.executorEnvs, classPathEntries ++ testingClassPath, libraryPathEntries, javaOpts)
val appUIAddress = sc.ui.map(_.appUIAddress).getOrElse("")
val appDesc = new ApplicationDescription(sc.appName, maxCores, sc.executorMemory, command,
appUIAddress, sc.eventLogDir)
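
For reference, a minimal self-contained sketch of the two patterns above, runnable as a Scala script; splitPathList is a name invented for illustration and is not part of the patch:

    import java.io.File

    // Turn an optional, path-separator-delimited setting into a Seq of entries.
    def splitPathList(value: Option[String]): Seq[String] =
      value.map(_.split(File.pathSeparator).toSeq).getOrElse(Nil)

    // Under test ("spark.testing" set), the parent JVM's class path is exposed
    // to children, so executors find the same jars even when the assembly was
    // built with the "*-provided" profiles.
    val testingClassPath: Seq[String] =
      if (sys.props.contains("spark.testing")) {
        splitPathList(sys.props.get("java.class.path"))
      } else {
        Nil
      }

    println(testingClassPath.mkString(File.pathSeparator))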
diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index 9d6b6161ce4da..c4f1898a2db15 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -990,11 +990,12 @@ private[spark] object Utils extends Logging {
for ((key, value) <- extraEnvironment) {
environment.put(key, value)
}
+
val process = builder.start()
new Thread("read stderr for " + command(0)) {
override def run() {
for (line <- Source.fromInputStream(process.getErrorStream).getLines()) {
- System.err.println(line)
+ logInfo(line)
}
}
}.start()
@@ -1089,7 +1090,7 @@ private[spark] object Utils extends Logging {
var firstUserLine = 0
var insideSpark = true
var callStack = new ArrayBuffer[String]() :+ ""
-
+
Thread.currentThread.getStackTrace().foreach { ste: StackTraceElement =>
// When running under some profilers, the current stack trace might contain some bogus
// frames. This is intended to ensure that we don't crash in these situations by
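
The stderr-reading thread in the first hunk above exists so the child's error stream is continuously drained; otherwise a chatty child can fill the OS pipe buffer and stall. A standalone sketch of the same pattern, with an illustrative command that merely produces some stderr output:

    import scala.io.Source

    // Read the child's error stream on its own thread so the pipe buffer
    // cannot fill up and block the child process.
    val process = new ProcessBuilder("ls", "--no-such-flag").start()
    new Thread("read stderr") {
      override def run(): Unit = {
        for (line <- Source.fromInputStream(process.getErrorStream).getLines()) {
          println(line)  // the patch forwards such lines to logInfo instead
        }
      }
    }.start()
    process.waitFor()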
diff --git a/core/src/test/scala/org/apache/spark/DriverSuite.scala b/core/src/test/scala/org/apache/spark/DriverSuite.scala
index 541d8eac80556..8a54360e81795 100644
--- a/core/src/test/scala/org/apache/spark/DriverSuite.scala
+++ b/core/src/test/scala/org/apache/spark/DriverSuite.scala
@@ -35,7 +35,7 @@ class DriverSuite extends FunSuite with Timeouts {
forAll(masters) { (master: String) =>
failAfter(60 seconds) {
Utils.executeAndGetOutput(
- Seq("./bin/spark-class", "org.apache.spark.DriverWithoutCleanup", master),
+ Seq(s"$sparkHome/bin/spark-class", "org.apache.spark.DriverWithoutCleanup", master),
new File(sparkHome),
Map("SPARK_TESTING" -> "1", "SPARK_HOME" -> sparkHome))
}
diff --git a/examples/pom.xml b/examples/pom.xml
index bdc5d0562f3e1..002d4458c4b3e 100644
--- a/examples/pom.xml
+++ b/examples/pom.xml
@@ -98,143 +98,145 @@
${project.version}
- org.eclipse.jetty
- jetty-server
+ org.apache.hbase
+ hbase-testing-util
+ ${hbase.version}
+ ${hbase.deps.scope}
+
+
+
+ org.apache.hbase
+ hbase-annotations
+
+
+ org.jruby
+ jruby-complete
+
+
+
+
+ org.apache.hbase
+ hbase-protocol
+ ${hbase.version}
+ ${hbase.deps.scope}
+
+
+ org.apache.hbase
+ hbase-common
+ ${hbase.version}
+ ${hbase.deps.scope}
+
+
+
+ org.apache.hbase
+ hbase-annotations
+
+
+
+
+ org.apache.hbase
+ hbase-client
+ ${hbase.version}
+ ${hbase.deps.scope}
+
+
+
+ org.apache.hbase
+ hbase-annotations
+
+
+ io.netty
+ netty
+
+
+
+
+ org.apache.hbase
+ hbase-server
+ ${hbase.version}
+ ${hbase.deps.scope}
+
+
+
+ org.apache.hbase
+ hbase-annotations
+
+
+ org.apache.hadoop
+ hadoop-core
+
+
+ org.apache.hadoop
+ hadoop-client
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-jobclient
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-core
+
+
+ org.apache.hadoop
+ hadoop-auth
+
+
+ org.apache.hadoop
+ hadoop-annotations
+
+
+ org.apache.hadoop
+ hadoop-hdfs
+
+
+ org.apache.hbase
+ hbase-hadoop1-compat
+
+
+ org.apache.commons
+ commons-math
+
+
+ com.sun.jersey
+ jersey-core
+
+
+ org.slf4j
+ slf4j-api
+
+
+ com.sun.jersey
+ jersey-server
+
+
+ com.sun.jersey
+ jersey-core
+
+
+ com.sun.jersey
+ jersey-json
+
+
+
+ commons-io
+ commons-io
+
+
+
+
+ org.apache.hbase
+ hbase-hadoop-compat
+ ${hbase.version}
+ ${hbase.deps.scope}
+
+
+ org.apache.hbase
+ hbase-hadoop-compat
+ ${hbase.version}
+ test-jar
+ test
-
- org.apache.hbase
- hbase-testing-util
- ${hbase.version}
-
-
-
- org.apache.hbase
- hbase-annotations
-
-
- org.jruby
- jruby-complete
-
-
-
-
- org.apache.hbase
- hbase-protocol
- ${hbase.version}
-
-
- org.apache.hbase
- hbase-common
- ${hbase.version}
-
-
-
- org.apache.hbase
- hbase-annotations
-
-
-
-
- org.apache.hbase
- hbase-client
- ${hbase.version}
-
-
-
- org.apache.hbase
- hbase-annotations
-
-
- io.netty
- netty
-
-
-
-
- org.apache.hbase
- hbase-server
- ${hbase.version}
-
-
- org.apache.hadoop
- hadoop-core
-
-
- org.apache.hadoop
- hadoop-client
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-jobclient
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-core
-
-
- org.apache.hadoop
- hadoop-auth
-
-
-
- org.apache.hbase
- hbase-annotations
-
-
- org.apache.hadoop
- hadoop-annotations
-
-
- org.apache.hadoop
- hadoop-hdfs
-
-
- org.apache.hbase
- hbase-hadoop1-compat
-
-
- org.apache.commons
- commons-math
-
-
- com.sun.jersey
- jersey-core
-
-
- org.slf4j
- slf4j-api
-
-
- com.sun.jersey
- jersey-server
-
-
- com.sun.jersey
- jersey-core
-
-
- com.sun.jersey
- jersey-json
-
-
-
- commons-io
- commons-io
-
-
-
-
- org.apache.hbase
- hbase-hadoop-compat
- ${hbase.version}
-
-
- org.apache.hbase
- hbase-hadoop-compat
- ${hbase.version}
- test-jar
- test
-
org.apache.commons
commons-math3
@@ -308,31 +310,6 @@
org.apache.maven.plugins
maven-shade-plugin
-
- false
- ${project.build.directory}/scala-${scala.binary.version}/spark-examples-${project.version}-hadoop${hadoop.version}.jar
-
-
- *:*
-
-
-
-
- com.google.guava:guava
-
- com/google/common/base/Optional*
-
-
-
- *:*
-
- META-INF/*.SF
- META-INF/*.DSA
- META-INF/*.RSA
-
-
-
-
package
@@ -340,6 +317,34 @@
shade
+ false
+ ${project.build.directory}/scala-${scala.binary.version}/spark-examples-${project.version}-hadoop${hadoop.version}.jar
+
+
+ *:*
+
+
+
+
+ com.google.guava:guava
+
+
+ **
+
+
+
+ *:*
+
+ META-INF/*.SF
+ META-INF/*.DSA
+ META-INF/*.RSA
+
+
+
com.google
@@ -411,7 +416,7 @@
-
scala-2.10
@@ -449,5 +454,37 @@
+
+    <profile>
+      <id>flume-provided</id>
+      <properties>
+        <flume.deps.scope>provided</flume.deps.scope>
+      </properties>
+    </profile>
+    <profile>
+      <id>hadoop-provided</id>
+      <properties>
+        <hadoop.deps.scope>provided</hadoop.deps.scope>
+      </properties>
+    </profile>
+    <profile>
+      <id>hbase-provided</id>
+      <properties>
+        <hbase.deps.scope>provided</hbase.deps.scope>
+      </properties>
+    </profile>
+    <profile>
+      <id>hive-provided</id>
+      <properties>
+        <hive.deps.scope>provided</hive.deps.scope>
+      </properties>
+    </profile>
+    <profile>
+      <id>parquet-provided</id>
+      <properties>
+        <parquet.deps.scope>provided</parquet.deps.scope>
+      </properties>
+    </profile>
diff --git a/external/flume-sink/pom.xml b/external/flume-sink/pom.xml
index 71f595d0a6800..0706f1ebf66e2 100644
--- a/external/flume-sink/pom.xml
+++ b/external/flume-sink/pom.xml
@@ -38,32 +38,10 @@
org.apache.flume
flume-ng-sdk
- ${flume.version}
-
-
- io.netty
- netty
-
-
- org.apache.thrift
- libthrift
-
-
org.apache.flume
flume-ng-core
- ${flume.version}
-
-
- io.netty
- netty
-
-
- org.apache.thrift
- libthrift
-
-
org.scala-lang
diff --git a/external/flume/pom.xml b/external/flume/pom.xml
index 0374262212e08..1f2681394c583 100644
--- a/external/flume/pom.xml
+++ b/external/flume/pom.xml
@@ -46,20 +46,13 @@
spark-streaming-flume-sink_${scala.binary.version}
${project.version}
+
+ org.apache.flume
+ flume-ng-core
+
org.apache.flume
flume-ng-sdk
- ${flume.version}
-
-
- io.netty
- netty
-
-
- org.apache.thrift
- libthrift
-
-
org.scalacheck
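
The version and exclusion blocks removed from the two Flume modules here (and the akka-zeromq version just below) are not lost: the parent pom now declares flume-ng-core, flume-ng-sdk, and akka-zeromq in its dependencyManagement section with the same netty/libthrift exclusions, so the modules inherit them.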
diff --git a/external/zeromq/pom.xml b/external/zeromq/pom.xml
index 2fb5f0ed2f57c..e919c2c9b19ea 100644
--- a/external/zeromq/pom.xml
+++ b/external/zeromq/pom.xml
@@ -44,7 +44,6 @@
${akka.group}
akka-zeromq_${scala.binary.version}
- ${akka.version}
org.scalacheck
diff --git a/graphx/pom.xml b/graphx/pom.xml
index 91db799d244ad..72374aae6da9b 100644
--- a/graphx/pom.xml
+++ b/graphx/pom.xml
@@ -45,10 +45,6 @@
jblas
${jblas.version}
-
- org.eclipse.jetty
- jetty-server
-
org.scalacheck
scalacheck_${scala.binary.version}
diff --git a/mllib/pom.xml b/mllib/pom.xml
index 2198757481684..a0bda89ccaa71 100644
--- a/mllib/pom.xml
+++ b/mllib/pom.xml
@@ -29,7 +29,7 @@
spark-mllib_2.10
mllib
-
+
jar
Spark Project ML Library
http://spark.apache.org/
@@ -50,10 +50,6 @@
spark-sql_${scala.binary.version}
${project.version}
-
- org.eclipse.jetty
- jetty-server
-
org.jblas
jblas
diff --git a/pom.xml b/pom.xml
index 46ff211f91160..703e5c47bf59b 100644
--- a/pom.xml
+++ b/pom.xml
@@ -123,8 +123,10 @@
2.4.1
${hadoop.version}
0.94.6
+ hbase
1.4.0
3.4.5
+    <hive.group>org.spark-project.hive</hive.group>
0.13.1a
@@ -143,13 +145,36 @@
4.2.6
3.1.1
${project.build.directory}/spark-test-classpath.txt
-    <PermGen>64m</PermGen>
-    <MaxPermGen>512m</MaxPermGen>
2.10.4
2.10
${scala.version}
org.scala-lang
-    <jackson.version>1.8.8</jackson.version>
+    <codehaus.jackson.version>1.8.8</codehaus.jackson.version>
+    <snappy.version>1.1.1.6</snappy.version>
+
+    <flume.deps.scope>compile</flume.deps.scope>
+    <hadoop.deps.scope>compile</hadoop.deps.scope>
+    <hbase.deps.scope>compile</hbase.deps.scope>
+    <hive.deps.scope>compile</hive.deps.scope>
+    <parquet.deps.scope>compile</parquet.deps.scope>
+
+    <spark.test.home>${session.executionRootDirectory}</spark.test.home>
+
+    <PermGen>64m</PermGen>
+    <MaxPermGen>512m</MaxPermGen>
+    <CodeCacheSize>512m</CodeCacheSize>
@@ -244,21 +269,20 @@
-
-
+
org.spark-project.spark
unused
1.0.0
org.codehaus.groovy
@@ -369,11 +393,13 @@
org.slf4j
slf4j-api
${slf4j.version}
+ ${hadoop.deps.scope}
org.slf4j
slf4j-log4j12
${slf4j.version}
+ ${hadoop.deps.scope}
org.slf4j
@@ -390,6 +416,7 @@
log4j
log4j
${log4j.version}
+ ${hadoop.deps.scope}
com.ning
@@ -399,7 +426,8 @@
org.xerial.snappy
snappy-java
- 1.1.1.6
+ ${snappy.version}
+ ${hadoop.deps.scope}
net.jpountz.lz4
@@ -427,6 +455,7 @@
com.google.protobuf
protobuf-java
${protobuf.version}
+ ${hadoop.deps.scope}
${akka.group}
@@ -448,6 +477,17 @@
akka-testkit_${scala.binary.version}
${akka.version}
+
+ ${akka.group}
+ akka-zeromq_${scala.binary.version}
+ ${akka.version}
+
+
+ ${akka.group}
+ akka-actor_${scala.binary.version}
+
+
+
org.apache.mesos
mesos
@@ -577,6 +617,7 @@
org.apache.curator
curator-recipes
2.4.0
+ ${hadoop.deps.scope}
org.jboss.netty
@@ -588,6 +629,7 @@
org.apache.hadoop
hadoop-client
${hadoop.version}
+ ${hadoop.deps.scope}
asm
@@ -623,11 +665,13 @@
org.apache.avro
avro
${avro.version}
+ ${hadoop.deps.scope}
org.apache.avro
avro-ipc
${avro.version}
+ ${hadoop.deps.scope}
io.netty
@@ -656,6 +700,7 @@
avro-mapred
${avro.version}
${avro.mapred.classifier}
+ ${hive.deps.scope}
io.netty
@@ -684,6 +729,7 @@
net.java.dev.jets3t
jets3t
${jets3t.version}
+ ${hadoop.deps.scope}
commons-logging
@@ -695,6 +741,7 @@
org.apache.hadoop
hadoop-yarn-api
${yarn.version}
+ ${hadoop.deps.scope}
javax.servlet
@@ -722,6 +769,7 @@
org.apache.hadoop
hadoop-yarn-common
${yarn.version}
+ ${hadoop.deps.scope}
asm
@@ -778,6 +826,7 @@
org.apache.hadoop
hadoop-yarn-server-web-proxy
${yarn.version}
+ ${hadoop.deps.scope}
asm
@@ -805,6 +854,7 @@
org.apache.hadoop
hadoop-yarn-client
${yarn.version}
+ ${hadoop.deps.scope}
asm
@@ -829,15 +879,126 @@
-
- org.codehaus.jackson
- jackson-mapper-asl
- ${jackson.version}
+ org.apache.zookeeper
+ zookeeper
+ ${zookeeper.version}
+ ${hadoop.deps.scope}
org.codehaus.jackson
jackson-core-asl
- ${jackson.version}
+ ${codehaus.jackson.version}
+ ${hadoop.deps.scope}
+
+
+ org.codehaus.jackson
+ jackson-mapper-asl
+ ${codehaus.jackson.version}
+ ${hadoop.deps.scope}
+
+
+ ${hive.group}
+ hive-beeline
+ ${hive.version}
+ ${hive.deps.scope}
+
+
+ ${hive.group}
+ hive-cli
+ ${hive.version}
+ ${hive.deps.scope}
+
+
+ ${hive.group}
+ hive-exec
+ ${hive.version}
+ ${hive.deps.scope}
+
+
+ commons-logging
+ commons-logging
+
+
+ com.esotericsoftware.kryo
+ kryo
+
+
+
+
+ ${hive.group}
+ hive-jdbc
+ ${hive.version}
+ ${hive.deps.scope}
+
+
+ ${hive.group}
+ hive-metastore
+ ${hive.version}
+ ${hive.deps.scope}
+
+
+ ${hive.group}
+ hive-serde
+ ${hive.version}
+ ${hive.deps.scope}
+
+
+ commons-logging
+ commons-logging
+
+
+ commons-logging
+ commons-logging-api
+
+
+
+
+ com.twitter
+ parquet-column
+ ${parquet.version}
+ ${parquet.deps.scope}
+
+
+ com.twitter
+ parquet-hadoop
+ ${parquet.version}
+ ${parquet.deps.scope}
+
+
+ org.apache.flume
+ flume-ng-core
+ ${flume.version}
+ ${flume.deps.scope}
+
+
+ io.netty
+ netty
+
+
+ org.apache.thrift
+ libthrift
+
+
+ org.mortbay.jetty
+ servlet-api
+
+
+
+
+ org.apache.flume
+ flume-ng-sdk
+ ${flume.version}
+ ${flume.deps.scope}
+
+
+ io.netty
+ netty
+
+
+ org.apache.thrift
+ libthrift
+
+
@@ -914,6 +1075,7 @@
-Xmx1024m
-XX:PermSize=${PermGen}
-XX:MaxPermSize=${MaxPermGen}
+ -XX:ReservedCodeCacheSize=${CodeCacheSize}
-source
@@ -980,15 +1142,21 @@
${project.build.directory}/surefire-reports
.
SparkTestSuite.txt
- -ea -Xmx3g -XX:MaxPermSize=${MaxPermGen} -XX:ReservedCodeCacheSize=512m
+ -ea -Xmx3g -XX:MaxPermSize=${MaxPermGen} -XX:ReservedCodeCacheSize=${CodeCacheSize}
+
+
+ ${test_classpath}
+
true
- ${session.executionRootDirectory}
+ ${spark.test.home}
1
false
false
- ${test_classpath}
true
@@ -1011,11 +1179,6 @@
maven-antrun-plugin
1.7
-
- org.apache.maven.plugins
- maven-shade-plugin
- 2.2
-
org.apache.maven.plugins
maven-source-plugin
@@ -1104,6 +1267,7 @@
org.apache.maven.plugins
maven-shade-plugin
+ 2.2
false
@@ -1373,53 +1537,6 @@
-
-
- hadoop-provided
-
-
- org.apache.hadoop
- hadoop-client
- provided
-
-
- org.apache.hadoop
- hadoop-yarn-api
- provided
-
-
- org.apache.hadoop
- hadoop-yarn-common
- provided
-
-
- org.apache.hadoop
- hadoop-yarn-server-web-proxy
- provided
-
-
- org.apache.hadoop
- hadoop-yarn-client
- provided
-
-
- org.apache.avro
- avro
- provided
-
-
- org.apache.avro
- avro-ipc
- provided
-
-
- org.apache.zookeeper
- zookeeper
- ${zookeeper.version}
- provided
-
-
-
hive-thriftserver
@@ -1472,5 +1589,25 @@
+
+    <profile>
+      <id>flume-provided</id>
+    </profile>
+    <profile>
+      <id>hadoop-provided</id>
+    </profile>
+    <profile>
+      <id>hbase-provided</id>
+    </profile>
+    <profile>
+      <id>hive-provided</id>
+    </profile>
+    <profile>
+      <id>parquet-provided</id>
+    </profile>
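
The five profiles just above carry no properties; they appear to be declared in the parent so that passing -Pflume-provided and friends on the command line does not trip Maven's complaint about unknown profiles when building a sub-module that does not define them itself. The actual scope flipping happens in the module poms (assembly, examples) shown earlier.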
diff --git a/repl/pom.xml b/repl/pom.xml
index 97165e024926e..0bc8bccf90a6d 100644
--- a/repl/pom.xml
+++ b/repl/pom.xml
@@ -68,10 +68,6 @@
${project.version}
test
-
- org.eclipse.jetty
- jetty-server
-
org.scala-lang
scala-compiler
@@ -103,13 +99,6 @@
true
-
- org.apache.maven.plugins
- maven-install-plugin
-
- true
-
-
org.codehaus.mojo
diff --git a/sql/core/pom.xml b/sql/core/pom.xml
index 023ce2041bb86..3e9ef07df9db6 100644
--- a/sql/core/pom.xml
+++ b/sql/core/pom.xml
@@ -56,12 +56,10 @@
com.twitter
parquet-column
- ${parquet.version}
com.twitter
parquet-hadoop
- ${parquet.version}
com.fasterxml.jackson.core
diff --git a/sql/hive-thriftserver/pom.xml b/sql/hive-thriftserver/pom.xml
index d3a517375cf25..259eef0b80d03 100644
--- a/sql/hive-thriftserver/pom.xml
+++ b/sql/hive-thriftserver/pom.xml
@@ -42,19 +42,16 @@
${project.version}
- org.spark-project.hive
+ ${hive.group}
hive-cli
- ${hive.version}
- org.spark-project.hive
+ ${hive.group}
hive-jdbc
- ${hive.version}
- org.spark-project.hive
+ ${hive.group}
hive-beeline
- ${hive.version}
diff --git a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala
index e8ffbc5b954d4..60953576d0e37 100644
--- a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala
+++ b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala
@@ -48,6 +48,7 @@ class CliSuite extends FunSuite with BeforeAndAfterAll with Logging {
| --master local
| --hiveconf ${ConfVars.METASTORECONNECTURLKEY}=$jdbcUrl
| --hiveconf ${ConfVars.METASTOREWAREHOUSE}=$warehousePath
+ | --driver-class-path ${sys.props("java.class.path")}
""".stripMargin.split("\\s+").toSeq ++ extraArgs
}
@@ -70,7 +71,7 @@ class CliSuite extends FunSuite with BeforeAndAfterAll with Logging {
}
// Searching expected output line from both stdout and stderr of the CLI process
- val process = (Process(command) #< queryStream).run(
+ val process = (Process(command, None) #< queryStream).run(
ProcessLogger(captureOutput("stdout"), captureOutput("stderr")))
try {
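
Passing the test JVM's own java.class.path via --driver-class-path means the forked CLI process still sees Hive and Hadoop classes when those dependencies are marked "provided" and therefore absent from the assembly; the explicit None working directory makes the child inherit the caller's cwd. A sketch of the same Process usage follows the HiveThriftServer2Suite hunk below.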
diff --git a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suite.scala b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suite.scala
index 94d5ed4f1d15e..7814aa38f4146 100644
--- a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suite.scala
+++ b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suite.scala
@@ -142,6 +142,7 @@ class HiveThriftServer2Suite extends FunSuite with Logging {
| --hiveconf ${ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST}=localhost
| --hiveconf ${ConfVars.HIVE_SERVER2_TRANSPORT_MODE}=http
| --hiveconf ${ConfVars.HIVE_SERVER2_THRIFT_HTTP_PORT}=$port
+ | --driver-class-path ${sys.props("java.class.path")}
""".stripMargin.split("\\s+").toSeq
} else {
s"""$startScript
@@ -151,6 +152,7 @@ class HiveThriftServer2Suite extends FunSuite with Logging {
| --hiveconf ${ConfVars.METASTOREWAREHOUSE}=$warehousePath
| --hiveconf ${ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST}=localhost
| --hiveconf ${ConfVars.HIVE_SERVER2_THRIFT_PORT}=$port
+ | --driver-class-path ${sys.props("java.class.path")}
""".stripMargin.split("\\s+").toSeq
}
@@ -179,8 +181,9 @@ class HiveThriftServer2Suite extends FunSuite with Logging {
}
}
- // Resets SPARK_TESTING to avoid loading Log4J configurations in testing class paths
- val env = Seq("SPARK_TESTING" -> "0")
+ val env = Seq(
+ // Resets SPARK_TESTING to avoid loading Log4J configurations in testing class paths
+ "SPARK_TESTING" -> "0")
Process(command, None, env: _*).run(ProcessLogger(
captureThriftServerOutput("stdout"),
@@ -214,7 +217,7 @@ class HiveThriftServer2Suite extends FunSuite with Logging {
} finally {
warehousePath.delete()
metastorePath.delete()
- Process(stopScript).run().exitValue()
+ Process(stopScript, None, env: _*).run().exitValue()
// The `spark-daemon.sh' script uses kill, which is not synchronous, have to wait for a while.
Thread.sleep(3.seconds.toMillis)
Option(logTailingProcess).map(_.destroy())
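
A minimal sketch of the Process(command, None, env: _*) form used above: None keeps the caller's working directory, and the env pairs are appended to the child's environment. The printenv command is illustrative only:

    import scala.sys.process._

    val env = Seq("SPARK_TESTING" -> "0")
    // Run the child with the extra environment variable and collect its exit code.
    val exitCode = Process(Seq("printenv", "SPARK_TESTING"), None, env: _*).!
    println(s"child exited with $exitCode")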
diff --git a/sql/hive/pom.xml b/sql/hive/pom.xml
index 46aacad01113f..58b0722464be8 100644
--- a/sql/hive/pom.xml
+++ b/sql/hive/pom.xml
@@ -47,9 +47,8 @@
${project.version}
- org.spark-project.hive
+ ${hive.group}
hive-metastore
- ${hive.version}
commons-httpclient
@@ -57,51 +56,27 @@
3.1
- org.spark-project.hive
+ ${hive.group}
hive-exec
- ${hive.version}
-
-
- commons-logging
- commons-logging
-
-
- com.esotericsoftware.kryo
- kryo
-
-
org.codehaus.jackson
jackson-mapper-asl
- org.spark-project.hive
+ ${hive.group}
hive-serde
- ${hive.version}
-
-
- commons-logging
- commons-logging
-
-
- commons-logging
- commons-logging-api
-
-
org.apache.avro
avro
- ${avro.version}
org.apache.avro
avro-mapred
- ${avro.version}
${avro.mapred.classifier}
diff --git a/streaming/pom.xml b/streaming/pom.xml
index 2023210d9b9be..d3c6d0347a622 100644
--- a/streaming/pom.xml
+++ b/streaming/pom.xml
@@ -68,13 +68,13 @@
target/scala-${scala.binary.version}/classes
target/scala-${scala.binary.version}/test-classes
-
diff --git a/yarn/pom.xml b/yarn/pom.xml
index bcb77b3e3c70e..b86857db7bde6 100644
--- a/yarn/pom.xml
+++ b/yarn/pom.xml
@@ -131,13 +131,6 @@
true
-
- org.apache.maven.plugins
- maven-install-plugin
-
- true
-
-
target/scala-${scala.binary.version}/classes
diff --git a/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala b/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala
index 8d0543771309b..c363d755c1752 100644
--- a/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala
+++ b/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala
@@ -367,6 +367,10 @@ private[spark] class Client(
}
}
+ sys.env.get(ENV_DIST_CLASSPATH).foreach { dcp =>
+ env(ENV_DIST_CLASSPATH) = dcp
+ }
+
env
}
@@ -652,6 +656,9 @@ object Client extends Logging {
val APP_FILE_PERMISSION: FsPermission =
FsPermission.createImmutable(Integer.parseInt("644", 8).toShort)
+ // Distribution-defined classpath to add to processes
+ val ENV_DIST_CLASSPATH = "SPARK_DIST_CLASSPATH"
+
/**
* Find the user-defined Spark jar if configured, or return the jar containing this
* class if not.
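
Forwarding SPARK_DIST_CLASSPATH into the YARN launch environment closes the loop for cluster mode: processes started in the containers can see the distribution-defined classpath too, presumably picked up by the containers' classpath setup the same way the compute-classpath scripts consume the variable locally.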