From 2eb900be6c6de431bdb9b976acdf50a40b521267 Mon Sep 17 00:00:00 2001
From: Dilip Biswal
Date: Wed, 5 Apr 2017 04:10:19 -0400
Subject: [PATCH 1/4] [SPARK-20204] SQLConf should react to change in default
 timezone settings

---
 .../spark/internal/config/ConfigBuilder.scala   |  9 +++++++++
 .../spark/internal/config/ConfigEntry.scala     | 17 +++++++++++++++++
 .../org/apache/spark/sql/internal/SQLConf.scala |  2 +-
 3 files changed, 27 insertions(+), 1 deletion(-)

diff --git a/core/src/main/scala/org/apache/spark/internal/config/ConfigBuilder.scala b/core/src/main/scala/org/apache/spark/internal/config/ConfigBuilder.scala
index b9921138cc6c7..9cdb9ac335b91 100644
--- a/core/src/main/scala/org/apache/spark/internal/config/ConfigBuilder.scala
+++ b/core/src/main/scala/org/apache/spark/internal/config/ConfigBuilder.scala
@@ -147,6 +147,15 @@ private[spark] class TypedConfigBuilder[T](
     }
   }
 
+  /** Creates a [[ConfigEntry]] with a function has a default value */
+  def createWithDefaultFunction(defaultFunc: () => T): ConfigEntry[T] = {
+    val entry =
+      new ConfigEntryWithDefaultFunction[T](parent.key, defaultFunc, converter,
+        stringConverter, parent._doc, parent._public)
+    parent._onCreate.foreach(_ (entry))
+    entry
+  }
+
   /**
    * Creates a [[ConfigEntry]] that has a default value. The default value is provided as a
    * [[String]] and must be a valid value for the entry.
diff --git a/core/src/main/scala/org/apache/spark/internal/config/ConfigEntry.scala b/core/src/main/scala/org/apache/spark/internal/config/ConfigEntry.scala
index 4f3e42bb3c94e..e86712e84d6ac 100644
--- a/core/src/main/scala/org/apache/spark/internal/config/ConfigEntry.scala
+++ b/core/src/main/scala/org/apache/spark/internal/config/ConfigEntry.scala
@@ -78,7 +78,24 @@ private class ConfigEntryWithDefault[T] (
   def readFrom(reader: ConfigReader): T = {
     reader.get(key).map(valueConverter).getOrElse(_defaultValue)
   }
+}
+
+private class ConfigEntryWithDefaultFunction[T] (
+    key: String,
+    _defaultFunction: () => T,
+    valueConverter: String => T,
+    stringConverter: T => String,
+    doc: String,
+    isPublic: Boolean)
+  extends ConfigEntry(key, valueConverter, stringConverter, doc, isPublic) {
+
+  override def defaultValue: Option[T] = Some(_defaultFunction())
+  override def defaultValueString: String = stringConverter(_defaultFunction())
+
+  def readFrom(reader: ConfigReader): T = {
+    reader.get(key).map(valueConverter).getOrElse(_defaultFunction())
+  }
 }
 
 private class ConfigEntryWithDefaultString[T] (
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
index 5b5d547f8fe54..2802c0e2b461d 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
@@ -752,7 +752,7 @@ object SQLConf {
     buildConf("spark.sql.session.timeZone")
       .doc("""The ID of session local timezone, e.g. "GMT", "America/Los_Angeles", etc.""")
       .stringConf
-      .createWithDefault(TimeZone.getDefault().getID())
+      .createWithDefaultFunction(() => TimeZone.getDefault().getID())
 
   val WINDOW_EXEC_BUFFER_SPILL_THRESHOLD =
     buildConf("spark.sql.windowExec.buffer.spill.threshold")

From 6423b51b0ec84d9bcc8cf049fe5416d2a21d1c20 Mon Sep 17 00:00:00 2001
From: Dilip Biswal
Date: Wed, 5 Apr 2017 05:28:37 -0400
Subject: [PATCH 2/4] Review comments

---
 .../spark/internal/config/ConfigBuilder.scala    |  3 +--
 .../internal/config/ConfigEntrySuite.scala       | 21 +++++++++++++++++++
 2 files changed, 22 insertions(+), 2 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/internal/config/ConfigBuilder.scala b/core/src/main/scala/org/apache/spark/internal/config/ConfigBuilder.scala
index 9cdb9ac335b91..b0c3ce838005a 100644
--- a/core/src/main/scala/org/apache/spark/internal/config/ConfigBuilder.scala
+++ b/core/src/main/scala/org/apache/spark/internal/config/ConfigBuilder.scala
@@ -149,8 +149,7 @@ private[spark] class TypedConfigBuilder[T](
 
   /** Creates a [[ConfigEntry]] with a function has a default value */
   def createWithDefaultFunction(defaultFunc: () => T): ConfigEntry[T] = {
-    val entry =
-      new ConfigEntryWithDefaultFunction[T](parent.key, defaultFunc, converter,
+    val entry = new ConfigEntryWithDefaultFunction[T](parent.key, defaultFunc, converter,
       stringConverter, parent._doc, parent._public)
     parent._onCreate.foreach(_ (entry))
     entry
diff --git a/core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala b/core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala
index 3ff7e84d73bd4..870ec61e3e182 100644
--- a/core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala
+++ b/core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala
@@ -17,6 +17,7 @@
 
 package org.apache.spark.internal.config
 
+import java.util.TimeZone
 import java.util.concurrent.TimeUnit
 
 import org.apache.spark.{SparkConf, SparkFunSuite}
@@ -51,6 +52,26 @@ class ConfigEntrySuite extends SparkFunSuite {
     assert(conf.get(dConf) === 20.0)
   }
 
+  test("conf entry: timezone") {
+    val tzStart = TimeZone.getDefault().getID()
+    val conf = new SparkConf()
+    val dConf = ConfigBuilder(testKey("tz"))
+      .stringConf
+      .createWithDefaultFunction(() => TimeZone.getDefault().getID())
+
+    val tzConf = conf.get(dConf)
+    assert(tzStart === tzConf)
+
+    // Pick a timezone which is not the current timezone
+    val availableTzs: Seq[String] = TimeZone.getAvailableIDs();
+    val newTz = availableTzs.find(_ != tzStart).getOrElse(tzStart)
+    TimeZone.setDefault(TimeZone.getTimeZone(newTz))
+
+    val tzChanged = conf.get(dConf)
+    assert(tzChanged === newTz)
+    TimeZone.setDefault(TimeZone.getTimeZone(tzStart))
+  }
+
   test("conf entry: boolean") {
     val conf = new SparkConf()
     val bConf = ConfigBuilder(testKey("boolean")).booleanConf.createWithDefault(false)
From 9187cca6d66a990e17b3f1a694779cef48e5f6c7 Mon Sep 17 00:00:00 2001
From: Dilip Biswal
Date: Wed, 5 Apr 2017 12:02:00 -0400
Subject: [PATCH 3/4] review : test

---
 .../internal/config/ConfigEntrySuite.scala | 27 +++++++------------
 1 file changed, 9 insertions(+), 18 deletions(-)

diff --git a/core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala b/core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala
index 870ec61e3e182..c7c9c43741744 100644
--- a/core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala
+++ b/core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala
@@ -52,25 +52,7 @@ class ConfigEntrySuite extends SparkFunSuite {
     assert(conf.get(dConf) === 20.0)
   }
 
-  test("conf entry: timezone") {
-    val tzStart = TimeZone.getDefault().getID()
-    val conf = new SparkConf()
-    val dConf = ConfigBuilder(testKey("tz"))
-      .stringConf
-      .createWithDefaultFunction(() => TimeZone.getDefault().getID())
-
-    val tzConf = conf.get(dConf)
-    assert(tzStart === tzConf)
 
-    // Pick a timezone which is not the current timezone
-    val availableTzs: Seq[String] = TimeZone.getAvailableIDs();
-    val newTz = availableTzs.find(_ != tzStart).getOrElse(tzStart)
-    TimeZone.setDefault(TimeZone.getTimeZone(newTz))
 
-    val tzChanged = conf.get(dConf)
-    assert(tzChanged === newTz)
-    TimeZone.setDefault(TimeZone.getTimeZone(tzStart))
-  }
-
   test("conf entry: boolean") {
     val conf = new SparkConf()
@@ -272,4 +254,13 @@ class ConfigEntrySuite extends SparkFunSuite {
       .createWithDefault(null)
     testEntryRef(nullConf, ref(nullConf))
   }
+
+  test("conf entry : default function") {
+    var data = 0
+    val conf = new SparkConf()
+    val iConf = ConfigBuilder(testKey("int")).intConf.createWithDefaultFunction(() => data)
+    assert(conf.get(iConf) === 0)
+    data = 2
+    assert(conf.get(iConf) === 2)
+  }
 }
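[Editor's note: the replacement test works because the thunk passed to
createWithDefaultFunction closes over the var `data` by reference, so every
read of the entry re-evaluates it. A minimal standalone rendering of the same
mechanism, for illustration only:]

    var data = 0
    val thunk: () => Int = () => data   // captures the variable, not its current value
    assert(thunk() == 0)
    data = 2
    assert(thunk() == 2)                // re-evaluation observes the update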
From 1fb23cf8417bdb0da7c01c7e646b7e16c1fd511a Mon Sep 17 00:00:00 2001
From: Dilip Biswal
Date: Wed, 5 Apr 2017 17:13:17 -0400
Subject: [PATCH 4/4] review comments

---
 .../org/apache/spark/internal/config/ConfigBuilder.scala    | 2 +-
 .../org/apache/spark/internal/config/ConfigEntrySuite.scala | 5 +----
 .../main/scala/org/apache/spark/sql/internal/SQLConf.scala  | 2 +-
 3 files changed, 3 insertions(+), 6 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/internal/config/ConfigBuilder.scala b/core/src/main/scala/org/apache/spark/internal/config/ConfigBuilder.scala
index b0c3ce838005a..e5d60a7ef0984 100644
--- a/core/src/main/scala/org/apache/spark/internal/config/ConfigBuilder.scala
+++ b/core/src/main/scala/org/apache/spark/internal/config/ConfigBuilder.scala
@@ -147,7 +147,7 @@ private[spark] class TypedConfigBuilder[T](
     }
   }
 
-  /** Creates a [[ConfigEntry]] with a function has a default value */
+  /** Creates a [[ConfigEntry]] with a function to determine the default value */
   def createWithDefaultFunction(defaultFunc: () => T): ConfigEntry[T] = {
     val entry = new ConfigEntryWithDefaultFunction[T](parent.key, defaultFunc, converter,
       stringConverter, parent._doc, parent._public)
diff --git a/core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala b/core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala
index c7c9c43741744..e2ba0d2a53d04 100644
--- a/core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala
+++ b/core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala
@@ -17,7 +17,6 @@
 
 package org.apache.spark.internal.config
 
-import java.util.TimeZone
 import java.util.concurrent.TimeUnit
 
 import org.apache.spark.{SparkConf, SparkFunSuite}
@@ -52,8 +51,6 @@ class ConfigEntrySuite extends SparkFunSuite {
     assert(conf.get(dConf) === 20.0)
   }
 
-
-
   test("conf entry: boolean") {
     val conf = new SparkConf()
     val bConf = ConfigBuilder(testKey("boolean")).booleanConf.createWithDefault(false)
@@ -258,7 +255,7 @@ class ConfigEntrySuite extends SparkFunSuite {
   test("conf entry : default function") {
     var data = 0
     val conf = new SparkConf()
-    val iConf = ConfigBuilder(testKey("int")).intConf.createWithDefaultFunction(() => data)
+    val iConf = ConfigBuilder(testKey("intval")).intConf.createWithDefaultFunction(() => data)
     assert(conf.get(iConf) === 0)
     data = 2
     assert(conf.get(iConf) === 2)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
index 2802c0e2b461d..e685c2bed50ae 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
@@ -752,7 +752,7 @@ object SQLConf {
     buildConf("spark.sql.session.timeZone")
      .doc("""The ID of session local timezone, e.g. "GMT", "America/Los_Angeles", etc.""")
       .stringConf
-      .createWithDefaultFunction(() => TimeZone.getDefault().getID())
+      .createWithDefaultFunction(() => TimeZone.getDefault.getID)
 
   val WINDOW_EXEC_BUFFER_SPILL_THRESHOLD =
     buildConf("spark.sql.windowExec.buffer.spill.threshold")
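[Editor's note: the end-to-end effect of the series, as a sketch. When
spark.sql.session.timeZone has never been set explicitly, its resolved value
now tracks the current JVM default timezone instead of whatever the default
was when SQLConf was first loaded. The `spark` session below is assumed to
exist, as in spark-shell; this is illustrative, not part of the patch.]

    import java.util.TimeZone

    TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"))
    spark.conf.get("spark.sql.session.timeZone")  // "America/Los_Angeles"

    TimeZone.setDefault(TimeZone.getTimeZone("GMT"))
    spark.conf.get("spark.sql.session.timeZone")  // follows the change: "GMT"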