From 500fe15c3ef7c6e835d09dc9d9025c58c3467a76 Mon Sep 17 00:00:00 2001 From: Herman van Hovell Date: Fri, 14 Aug 2015 13:18:42 -0400 Subject: [PATCH 1/4] Replace << by HTML friendly &lt;&lt; --- .../java/org/apache/spark/unsafe/memory/TaskMemoryManager.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/unsafe/src/main/java/org/apache/spark/unsafe/memory/TaskMemoryManager.java b/unsafe/src/main/java/org/apache/spark/unsafe/memory/TaskMemoryManager.java index 358bb37250158..c6e1e98e54bfa 100644 --- a/unsafe/src/main/java/org/apache/spark/unsafe/memory/TaskMemoryManager.java +++ b/unsafe/src/main/java/org/apache/spark/unsafe/memory/TaskMemoryManager.java @@ -60,7 +60,7 @@ public class TaskMemoryManager { /** * Maximum supported data page size (in bytes). In principle, the maximum addressable page size is - * (1L << OFFSET_BITS) bytes, which is 2+ petabytes. However, the on-heap allocator's maximum page + * (1L &lt;&lt; OFFSET_BITS) bytes, which is 2+ petabytes. However, the on-heap allocator's maximum page * size is limited by the maximum amount of data that can be stored in a long[] array, which is * (2^32 - 1) * 8 bytes (or 16 gigabytes). Therefore, we cap this at 16 gigabytes. */ From 4a743c2cb7e6a4aa86e693f6b48e5528334ce5d6 Mon Sep 17 00:00:00 2001 From: Herman van Hovell Date: Fri, 14 Aug 2015 13:33:09 -0400 Subject: [PATCH 2/4] Replace self closing
<p/>
statements with
<p>
. --- .../main/java/org/apache/spark/launcher/SparkLauncher.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/launcher/src/main/java/org/apache/spark/launcher/SparkLauncher.java b/launcher/src/main/java/org/apache/spark/launcher/SparkLauncher.java index 03c9358bc865d..57993405e47be 100644 --- a/launcher/src/main/java/org/apache/spark/launcher/SparkLauncher.java +++ b/launcher/src/main/java/org/apache/spark/launcher/SparkLauncher.java @@ -193,7 +193,7 @@ public SparkLauncher setMainClass(String mainClass) { * Adds a no-value argument to the Spark invocation. If the argument is known, this method * validates whether the argument is indeed a no-value argument, and throws an exception * otherwise. - *
<p/>
+ * <p>
* Use this method with caution. It is possible to create an invalid Spark command by passing * unknown arguments to this method, since those are allowed for forward compatibility. * @@ -211,10 +211,10 @@ public SparkLauncher addSparkArg(String arg) { * Adds an argument with a value to the Spark invocation. If the argument name corresponds to * a known argument, the code validates that the argument actually expects a value, and throws * an exception otherwise. - *
<p/>
+ * <p>
* It is safe to add arguments modified by other methods in this class (such as * {@link #setMaster(String)} - the last invocation will be the one to take effect. - *
<p/>
+ * <p>
* Use this method with caution. It is possible to create an invalid Spark command by passing * unknown arguments to this method, since those are allowed for forward compatibility. * From b37b0b6a31a87cf6eadc131fec08a10ff23490c4 Mon Sep 17 00:00:00 2001 From: Herman van Hovell Date: Fri, 14 Aug 2015 14:40:32 -0400 Subject: [PATCH 3/4] Replace self closing
<p/>
statements with
<p>
in .java files. --- .../apache/spark/examples/ml/JavaDeveloperApiExample.java | 4 ++-- .../examples/streaming/JavaStatefulNetworkWordCount.java | 2 +- .../src/main/java/org/apache/spark/launcher/Main.java | 4 ++-- .../apache/spark/launcher/SparkClassCommandBuilder.java | 2 +- .../apache/spark/launcher/SparkSubmitCommandBuilder.java | 4 ++-- .../apache/spark/launcher/SparkSubmitOptionParser.java | 8 ++++---- 6 files changed, 12 insertions(+), 12 deletions(-) diff --git a/examples/src/main/java/org/apache/spark/examples/ml/JavaDeveloperApiExample.java b/examples/src/main/java/org/apache/spark/examples/ml/JavaDeveloperApiExample.java index 3f1fe900b0008..a377694507d29 100644 --- a/examples/src/main/java/org/apache/spark/examples/ml/JavaDeveloperApiExample.java +++ b/examples/src/main/java/org/apache/spark/examples/ml/JavaDeveloperApiExample.java @@ -124,7 +124,7 @@ public String uid() { /** * Param for max number of iterations - *
<p/>
+ * <p>
* NOTE: The usual way to add a parameter to a model or algorithm is to include: * - val myParamName: ParamType * - def getMyParamName @@ -222,7 +222,7 @@ public Vector predictRaw(Vector features) { /** * Create a copy of the model. * The copy is shallow, except for the embedded paramMap, which gets a deep copy. - *
<p/>
+ * <p>
* This is used for the defaul implementation of [[transform()]]. * * In Java, we have to make this method public since Java does not understand Scala's protected diff --git a/examples/src/main/java/org/apache/spark/examples/streaming/JavaStatefulNetworkWordCount.java b/examples/src/main/java/org/apache/spark/examples/streaming/JavaStatefulNetworkWordCount.java index 02f58f48b07ab..99b63a2590ae2 100644 --- a/examples/src/main/java/org/apache/spark/examples/streaming/JavaStatefulNetworkWordCount.java +++ b/examples/src/main/java/org/apache/spark/examples/streaming/JavaStatefulNetworkWordCount.java @@ -45,7 +45,7 @@ * Usage: JavaStatefulNetworkWordCount * and describe the TCP server that Spark Streaming would connect to receive * data. - *
<p/>
+ * <p>
* To run this on your local machine, you need to first run a Netcat server * `$ nc -lk 9999` * and then run the example diff --git a/launcher/src/main/java/org/apache/spark/launcher/Main.java b/launcher/src/main/java/org/apache/spark/launcher/Main.java index 62492f9baf3bb..a4e3acc674f36 100644 --- a/launcher/src/main/java/org/apache/spark/launcher/Main.java +++ b/launcher/src/main/java/org/apache/spark/launcher/Main.java @@ -32,7 +32,7 @@ class Main { /** * Usage: Main [class] [class args] - *
<p/>
+ * <p>
* This CLI works in two different modes: *