From d032bf930be1816694073f7d45ea92c53813c280 Mon Sep 17 00:00:00 2001
From: wangfei
Date: Wed, 13 Aug 2014 22:06:07 +0800
Subject: [PATCH 1/4] [SQL]Excess judgment

---
 sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala b/sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala
index 35c51dec0bcf5..2a22ad746a386 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala
@@ -87,8 +87,7 @@ trait SQLConf {
    *
    * Defaults to false as this feature is currently experimental.
    */
-  private[spark] def codegenEnabled: Boolean =
-    if (getConf(CODEGEN_ENABLED, "false") == "true") true else false
+  private[spark] def codegenEnabled: Boolean = getConf(CODEGEN_ENABLED, "false") == "true"

   /**
    * Upper bound on the sizes (in bytes) of the tables qualified for the auto conversion to

From e274515642e7a0063bebf315463cce0e0b702762 Mon Sep 17 00:00:00 2001
From: scwf
Date: Wed, 13 Aug 2014 22:43:58 +0800
Subject: [PATCH 2/4] fix redundant conditions

---
 core/src/main/scala/org/apache/spark/SecurityManager.scala | 4 ++--
 .../org/apache/spark/rdd/PartitionPruningRDDSuite.scala    | 6 +++---
 .../scala/org/apache/spark/sql/columnar/ColumnType.scala   | 4 +---
 .../scala/org/apache/spark/deploy/yarn/ClientBase.scala    | 2 +-
 4 files changed, 7 insertions(+), 9 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/SecurityManager.scala b/core/src/main/scala/org/apache/spark/SecurityManager.scala
index 25c2c9fc6af7c..76dd855c54dbf 100644
--- a/core/src/main/scala/org/apache/spark/SecurityManager.scala
+++ b/core/src/main/scala/org/apache/spark/SecurityManager.scala
@@ -294,7 +294,7 @@ private[spark] class SecurityManager(sparkConf: SparkConf) extends Logging {
   def checkUIViewPermissions(user: String): Boolean = {
     logDebug("user=" + user + " aclsEnabled=" + aclsEnabled() + " viewAcls=" +
       viewAcls.mkString(","))
-    if (aclsEnabled() && (user != null) && (!viewAcls.contains(user))) false else true
+    (aclsEnabled() && (user != null) && (!viewAcls.contains(user))) == false
   }

   /**
@@ -309,7 +309,7 @@ private[spark] class SecurityManager(sparkConf: SparkConf) extends Logging {
   def checkModifyPermissions(user: String): Boolean = {
     logDebug("user=" + user + " aclsEnabled=" + aclsEnabled() + " modifyAcls=" +
       modifyAcls.mkString(","))
-    if (aclsEnabled() && (user != null) && (!modifyAcls.contains(user))) false else true
+    (aclsEnabled() && (user != null) && (!modifyAcls.contains(user))) == false
   }

diff --git a/core/src/test/scala/org/apache/spark/rdd/PartitionPruningRDDSuite.scala b/core/src/test/scala/org/apache/spark/rdd/PartitionPruningRDDSuite.scala
index 956c2b9cbd321..a4ebe56dc8778 100644
--- a/core/src/test/scala/org/apache/spark/rdd/PartitionPruningRDDSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/PartitionPruningRDDSuite.scala
@@ -39,7 +39,7 @@ class PartitionPruningRDDSuite extends FunSuite with SharedSparkContext {
       }
     }
     val prunedRDD = PartitionPruningRDD.create(rdd, {
-      x => if (x == 2) true else false
+      x => (x == 2)
     })
     assert(prunedRDD.partitions.length == 1)
     val p = prunedRDD.partitions(0)
@@ -63,11 +63,11 @@ class PartitionPruningRDDSuite extends FunSuite with SharedSparkContext {
       }
     }
     val prunedRDD1 = PartitionPruningRDD.create(rdd, {
-      x => if (x == 0) true else false
+      x => (x == 0)
     })

     val prunedRDD2 = PartitionPruningRDD.create(rdd, {
-      x => if (x == 2) true else false
+      x => (x == 2)
     })

     val merged = prunedRDD1 ++ prunedRDD2
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/columnar/ColumnType.scala b/sql/core/src/main/scala/org/apache/spark/sql/columnar/ColumnType.scala
index 794bc60d0e315..9a61600115872 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/columnar/ColumnType.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/columnar/ColumnType.scala
@@ -158,9 +158,7 @@ private[sql] object BOOLEAN extends NativeColumnType(BooleanType, 4, 1) {
     buffer.put(if (v) 1.toByte else 0.toByte)
   }

-  override def extract(buffer: ByteBuffer) = {
-    if (buffer.get() == 1) true else false
-  }
+  override def extract(buffer: ByteBuffer) = buffer.get() == 1

   override def setField(row: MutableRow, ordinal: Int, value: Boolean) {
     row.setBoolean(ordinal, value)
diff --git a/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala b/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala
index 1da0a1b675554..ebbf91eeb5359 100644
--- a/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala
+++ b/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala
@@ -218,7 +218,7 @@ trait ClientBase extends Logging {
       if (! localPath.isEmpty()) {
         val localURI = new URI(localPath)
         if (!ClientBase.LOCAL_SCHEME.equals(localURI.getScheme())) {
-          val setPermissions = if (destName.equals(ClientBase.APP_JAR)) true else false
+          val setPermissions = destName.equals(ClientBase.APP_JAR)
           val destPath = copyRemoteFile(dst, qualifyForLocal(localURI), replication, setPermissions)
           val destFs = FileSystem.get(destPath.toUri(), conf)
           distCacheMgr.addResource(destFs, conf, destPath, localResources, LocalResourceType.FILE,

From e16239c23516b3432aa02850646bad8aa225f2a8 Mon Sep 17 00:00:00 2001
From: scwf
Date: Sun, 17 Aug 2014 09:36:01 +0800
Subject: [PATCH 3/4] fix confilct

---
 core/src/main/scala/org/apache/spark/SecurityManager.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/core/src/main/scala/org/apache/spark/SecurityManager.scala b/core/src/main/scala/org/apache/spark/SecurityManager.scala
index db117c9caf396..25c2c9fc6af7c 100644
--- a/core/src/main/scala/org/apache/spark/SecurityManager.scala
+++ b/core/src/main/scala/org/apache/spark/SecurityManager.scala
@@ -309,7 +309,7 @@ private[spark] class SecurityManager(sparkConf: SparkConf) extends Logging {
   def checkModifyPermissions(user: String): Boolean = {
     logDebug("user=" + user + " aclsEnabled=" + aclsEnabled() + " modifyAcls=" +
       modifyAcls.mkString(","))
-    if (aclsEnabled() && (user != null) && (!viewAcls.contains(user))) false else true
+    if (aclsEnabled() && (user != null) && (!modifyAcls.contains(user))) false else true
   }

From b2a044a13fcdfc69f897f0319a4d383407e2291c Mon Sep 17 00:00:00 2001
From: scwf
Date: Sun, 17 Aug 2014 14:14:25 +0800
Subject: [PATCH 4/4] merge SecurityManager

---
 .../scala/org/apache/spark/SecurityManager.scala    |  4 ++--
 .../apache/spark/rdd/PartitionPruningRDDSuite.scala | 13 ++++---------
 2 files changed, 6 insertions(+), 11 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/SecurityManager.scala b/core/src/main/scala/org/apache/spark/SecurityManager.scala
index 25c2c9fc6af7c..12b15fe0815be 100644
--- a/core/src/main/scala/org/apache/spark/SecurityManager.scala
+++ b/core/src/main/scala/org/apache/spark/SecurityManager.scala
@@ -294,7 +294,7 @@ private[spark] class SecurityManager(sparkConf: SparkConf) extends Logging {
   def checkUIViewPermissions(user: String): Boolean = {
     logDebug("user=" + user + " aclsEnabled=" + aclsEnabled() + " viewAcls=" +
       viewAcls.mkString(","))
-    if (aclsEnabled() && (user != null) && (!viewAcls.contains(user))) false else true
+    !aclsEnabled || user == null || viewAcls.contains(user)
   }

   /**
@@ -309,7 +309,7 @@ private[spark] class SecurityManager(sparkConf: SparkConf) extends Logging {
   def checkModifyPermissions(user: String): Boolean = {
     logDebug("user=" + user + " aclsEnabled=" + aclsEnabled() + " modifyAcls=" +
       modifyAcls.mkString(","))
-    if (aclsEnabled() && (user != null) && (!modifyAcls.contains(user))) false else true
+    !aclsEnabled || user == null || modifyAcls.contains(user)
   }

diff --git a/core/src/test/scala/org/apache/spark/rdd/PartitionPruningRDDSuite.scala b/core/src/test/scala/org/apache/spark/rdd/PartitionPruningRDDSuite.scala
index a4ebe56dc8778..8408d7e785c65 100644
--- a/core/src/test/scala/org/apache/spark/rdd/PartitionPruningRDDSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/PartitionPruningRDDSuite.scala
@@ -38,9 +38,7 @@ class PartitionPruningRDDSuite extends FunSuite with SharedSparkContext {
         Iterator()
       }
     }
-    val prunedRDD = PartitionPruningRDD.create(rdd, {
-      x => (x == 2)
-    })
+    val prunedRDD = PartitionPruningRDD.create(rdd, _ == 2)
     assert(prunedRDD.partitions.length == 1)
     val p = prunedRDD.partitions(0)
     assert(p.index == 0)
@@ -62,13 +60,10 @@ class PartitionPruningRDDSuite extends FunSuite with SharedSparkContext {
         List(split.asInstanceOf[TestPartition].testValue).iterator
       }
     }
-    val prunedRDD1 = PartitionPruningRDD.create(rdd, {
-      x => (x == 0)
-    })
+    val prunedRDD1 = PartitionPruningRDD.create(rdd, _ == 0)

-    val prunedRDD2 = PartitionPruningRDD.create(rdd, {
-      x => (x == 2)
-    })
+
+    val prunedRDD2 = PartitionPruningRDD.create(rdd, _ == 2)
     val merged = prunedRDD1 ++ prunedRDD2
     assert(merged.count() == 2)
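
Apart from the acl-list correction in PATCH 3/4, every hunk in this series applies one refactoring pattern: a Boolean expression wrapped in a redundant "if (cond) true else false" (or, in the interim PATCH 2/4 form, compared against false) is returned directly, with De Morgan's law applied where the original branches returned false for the "deny" case. The following is a minimal, standalone Scala sketch of that pattern; the names (codegenEnabledVerbose, checkView, aclsEnabled, viewAcls) only loosely mirror the Spark code touched above and are illustrative placeholders, not the real Spark API.

// Standalone sketch, not Spark code: the simplification pattern used in this series.
object RedundantConditionSketch {

  // Before: redundant if/else around a Boolean comparison (as in PATCH 1/4).
  def codegenEnabledVerbose(conf: Map[String, String]): Boolean =
    if (conf.getOrElse("spark.sql.codegen", "false") == "true") true else false

  // After: return the comparison itself.
  def codegenEnabled(conf: Map[String, String]): Boolean =
    conf.getOrElse("spark.sql.codegen", "false") == "true"

  // Before: "if (denyCondition) false else true", the SecurityManager shape.
  def checkViewVerbose(aclsEnabled: Boolean, user: String, viewAcls: Set[String]): Boolean =
    if (aclsEnabled && user != null && !viewAcls.contains(user)) false else true

  // After: negate the deny condition with De Morgan's law and return the
  // "allow" condition directly, which is the form PATCH 4/4 settles on.
  def checkView(aclsEnabled: Boolean, user: String, viewAcls: Set[String]): Boolean =
    !aclsEnabled || user == null || viewAcls.contains(user)

  def main(args: Array[String]): Unit = {
    // The verbose and simplified forms agree on representative inputs.
    assert(codegenEnabled(Map("spark.sql.codegen" -> "true")) ==
      codegenEnabledVerbose(Map("spark.sql.codegen" -> "true")))
    assert(checkView(aclsEnabled = false, user = null, viewAcls = Set.empty) ==
      checkViewVerbose(aclsEnabled = false, user = null, viewAcls = Set.empty))
    assert(checkView(aclsEnabled = true, user = "bob", viewAcls = Set("alice")) ==
      checkViewVerbose(aclsEnabled = true, user = "bob", viewAcls = Set("alice")))
  }
}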