From b12fced7b5c8d34daaf84bd1dc0d885db0d49468 Mon Sep 17 00:00:00 2001 From: root1 Date: Fri, 15 Nov 2019 12:14:01 +0530 Subject: [PATCH] [SPARK-29887][SQL]PostgreSQL dialect: cast to smallint --- .../catalyst/analysis/PostgreSQLDialect.scala | 23 ++++- .../spark/sql/catalyst/expressions/Cast.scala | 4 +- .../postgreSQL/PostgreCastToShort.scala | 87 +++++++++++++++++++ 3 files changed, 109 insertions(+), 5 deletions(-) create mode 100644 sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/postgreSQL/PostgreCastToShort.scala diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/PostgreSQLDialect.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/PostgreSQLDialect.scala index e7f0e571804d3..4f891502ec23f 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/PostgreSQLDialect.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/PostgreSQLDialect.scala @@ -19,15 +19,15 @@ package org.apache.spark.sql.catalyst.analysis import org.apache.spark.internal.Logging import org.apache.spark.sql.catalyst.expressions.Cast -import org.apache.spark.sql.catalyst.expressions.postgreSQL.PostgreCastToBoolean +import org.apache.spark.sql.catalyst.expressions.postgreSQL.{PostgreCastToBoolean, PostgreCastToShort} import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan import org.apache.spark.sql.catalyst.rules.Rule import org.apache.spark.sql.internal.SQLConf -import org.apache.spark.sql.types.{BooleanType, StringType} +import org.apache.spark.sql.types.{BooleanType, ShortType, StringType} object PostgreSQLDialect { val postgreSQLDialectRules: List[Rule[LogicalPlan]] = - CastToBoolean :: + CastToBoolean :: CastToShort :: Nil object CastToBoolean extends Rule[LogicalPlan] with Logging { @@ -46,4 +46,21 @@ object PostgreSQLDialect { } } } + + object CastToShort extends Rule[LogicalPlan] with Logging { + override def apply(plan: LogicalPlan): LogicalPlan = { + // The 
SQL configuration `spark.sql.dialect` can be changed in runtime. + // To make sure the configuration is effective, we have to check it during rule execution. + val conf = SQLConf.get + if (conf.usePostgreSQLDialect) { + plan.transformExpressions { + case Cast(child, dataType, timeZoneId) + if child.dataType != ShortType && dataType == ShortType => + PostgreCastToShort(child, timeZoneId) + } + } else { + plan + } + } + } } diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala index f3b58fa3137b1..ba55fbcf07411 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala @@ -507,7 +507,7 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit } // ShortConverter - private[this] def castToShort(from: DataType): Any => Any = from match { + protected[this] def castToShort(from: DataType): Any => Any = from match { case StringType => val result = new IntWrapper() buildCast[UTF8String](_, s => if (s.toShort(result)) { @@ -1363,7 +1363,7 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit (c, evPrim, evNull) => code"$evPrim = (byte) $c;" } - private[this] def castToShortCode( + protected[this] def castToShortCode( from: DataType, ctx: CodegenContext): CastFunction = from match { case StringType => diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/postgreSQL/PostgreCastToShort.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/postgreSQL/PostgreCastToShort.scala new file mode 100644 index 0000000000000..92c2f24ba12bb --- /dev/null +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/postgreSQL/PostgreCastToShort.scala @@ -0,0 +1,87 @@ +/* + * Licensed to the Apache Software Foundation (ASF) 
under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.spark.sql.catalyst.expressions.postgreSQL + +import org.apache.spark.sql.AnalysisException +import org.apache.spark.sql.catalyst.analysis.TypeCheckResult +import org.apache.spark.sql.catalyst.expressions.{CastBase, Expression, TimeZoneAwareExpression} +import org.apache.spark.sql.catalyst.expressions.codegen.Block._ +import org.apache.spark.sql.catalyst.expressions.codegen.CodegenContext +import org.apache.spark.sql.types._ +import org.apache.spark.unsafe.types.UTF8String +import org.apache.spark.unsafe.types.UTF8String.IntWrapper + +case class PostgreCastToShort(child: Expression, timeZoneId: Option[String]) + extends CastBase { + + override protected def ansiEnabled = + throw new UnsupportedOperationException("PostgreSQL dialect doesn't support ansi mode") + + override def withTimeZone(timeZoneId: String): TimeZoneAwareExpression = + copy(timeZoneId = Option(timeZoneId)) + + override def checkInputDataTypes(): TypeCheckResult = child.dataType match { + case StringType | IntegerType | LongType | ByteType | FloatType | DoubleType | NullType => + TypeCheckResult.TypeCheckSuccess + case _: DecimalType => TypeCheckResult.TypeCheckSuccess + case _ => + TypeCheckResult.TypeCheckFailure(s"cannot cast type 
${child.dataType} to short")
+  }
+
+  override def castToShort(from: DataType): Any => Any = from match {
+    case StringType =>
+      val result = new IntWrapper()
+      buildCast[UTF8String](_, s => if (s.toShort(result)) {
+        result.value.toShort
+      } else {
+        // Runtime parse failure: use IllegalArgumentException — AnalysisException is an
+        // analysis-time error type and cannot be constructed from generated Java code,
+        // so this keeps the interpreted path consistent with the codegen path below.
+        throw new IllegalArgumentException(s"invalid input syntax for integer: $s")
+      })
+    case x: NumericType =>
+      b => x.numeric.asInstanceOf[Numeric[Any]].toInt(b).toShort
+  }
+
+  def castDecimalToIntegralTypeCode(ctx: CodegenContext, integralType: String): CastFunction = {
+    (c, evPrim, evNull) => code"$evPrim = $c.to${integralType.capitalize}();"
+  }
+
+  override def castToShortCode(from: DataType, ctx: CodegenContext): CastFunction = from match {
+    case StringType =>
+      val wrapper = ctx.freshVariable("intWrapper", classOf[UTF8String.IntWrapper])
+      (c, evPrim, evNull) =>
+        code"""
+          UTF8String.IntWrapper $wrapper = new UTF8String.IntWrapper();
+          if ($c.toShort($wrapper)) {
+            $evPrim = (short) $wrapper.value;
+          } else {
+            throw new IllegalArgumentException("invalid input syntax for integer: " + $c);
+          }
+          $wrapper = null;
+        """
+
+    case DecimalType() => castDecimalToIntegralTypeCode(ctx, "short")
+    case _: NumericType =>
+      (c, evPrim, evNull) => code"$evPrim = (short) $c;"
+  }
+
+  override def dataType: DataType = ShortType
+
+  override def nullable: Boolean = child.nullable
+
+  override def toString: String = s"PostgreCastToShort($child as ${dataType.simpleString})"
+
+  override def sql: String = s"CAST(${child.sql} AS ${dataType.sql})"
+}