From fda100f3c42bf82c9d0accafc7230c906e0b8317 Mon Sep 17 00:00:00 2001 From: Herman van Hovell Date: Fri, 26 Aug 2016 01:31:47 +0200 Subject: [PATCH 1/2] Add BigDecimal literal to parser. --- .../spark/sql/catalyst/parser/SqlBase.g4 | 6 + .../sql/catalyst/expressions/literals.scala | 2 +- .../sql/catalyst/parser/AstBuilder.scala | 16 +- .../parser/ExpressionParserSuite.scala | 7 + .../resources/sql-tests/inputs/literals.sql | 6 + .../sql-tests/results/literals.sql.out | 184 ++++++++++-------- .../catalyst/ExpressionSQLBuilderSuite.scala | 1 + 7 files changed, 139 insertions(+), 83 deletions(-) diff --git a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4 b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4 index cab7c3ff5a8f7..a8af840c1e2a2 100644 --- a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4 +++ b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4 @@ -633,6 +633,7 @@ number | MINUS? SMALLINT_LITERAL #smallIntLiteral | MINUS? TINYINT_LITERAL #tinyIntLiteral | MINUS? DOUBLE_LITERAL #doubleLiteral + | MINUS? BIGDECIMAL_LITERAL #bigDecimalLiteral ; nonReserved @@ -928,6 +929,11 @@ DOUBLE_LITERAL (INTEGER_VALUE | DECIMAL_VALUE | SCIENTIFIC_DECIMAL_VALUE) 'D' ; +BIGDECIMAL_LITERAL + : + (INTEGER_VALUE | DECIMAL_VALUE | SCIENTIFIC_DECIMAL_VALUE) 'BD' + ; + IDENTIFIER : (LETTER | DIGIT | '_')+ ; diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala index 730a7f62e04c7..41e3952f0e253 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala @@ -266,7 +266,7 @@ case class Literal (value: Any, dataType: DataType) extends LeafExpression with case Double.NegativeInfinity => s"CAST('-Infinity' AS ${DoubleType.sql})" case _ => v + "D" } - case (v: Decimal, t: DecimalType) => s"CAST($v AS ${t.sql})" + case (v: Decimal, t: DecimalType) => v + "BD" case (v: Int, DateType) => s"DATE '${DateTimeUtils.toJavaDate(v)}'" case (v: Long, TimestampType) => s"TIMESTAMP('${DateTimeUtils.toJavaTimestamp(v)}')" case _ => value.toString diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala index 8b98efcbf33c8..893db93368457 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala @@ -26,7 +26,8 @@ import org.antlr.v4.runtime.{ParserRuleContext, Token} import org.antlr.v4.runtime.tree.{ParseTree, RuleNode, TerminalNode} import org.apache.spark.internal.Logging -import org.apache.spark.sql.catalyst.{FunctionIdentifier, InternalRow, TableIdentifier} +import org.apache.spark.sql.AnalysisException +import org.apache.spark.sql.catalyst.{FunctionIdentifier, TableIdentifier} import org.apache.spark.sql.catalyst.analysis._ import org.apache.spark.sql.catalyst.expressions._ import org.apache.spark.sql.catalyst.parser.SqlBaseParser._ @@ -1323,6 +1324,19 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with Logging { numericLiteral(ctx, Double.MinValue, Double.MaxValue, DoubleType.simpleString)(_.toDouble) } + /** + * Create a BigDecimal Literal expression. 
+ */ + override def visitBigDecimalLiteral(ctx: BigDecimalLiteralContext): Literal = { + val raw = ctx.getText.substring(0, ctx.getText.length - 2) + try { + Literal(BigDecimal(raw).underlying()) + } catch { + case e: AnalysisException => + throw new ParseException(e.message, ctx) + } + } + /** * Create a String literal expression. */ diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala index 401d9cd9d288c..dbc5db39aed98 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala @@ -392,6 +392,13 @@ class ExpressionParserSuite extends PlanTest { intercept("1.8E308D", s"does not fit in range") // TODO we need to figure out if we should throw an exception here! assertEqual("1E309", Literal(Double.PositiveInfinity)) + + // BigDecimal Literal + assertEqual("90912830918230182310293801923652346786BD", + Literal(BigDecimal("90912830918230182310293801923652346786").underlying())) + assertEqual("123.0E-28BD", Literal(BigDecimal("123.0E-28").underlying())) + assertEqual("123.08BD", Literal(BigDecimal("123.08").underlying())) + intercept("1.20E-38BD", "DecimalType can only support precision up to 38") } test("strings") { diff --git a/sql/core/src/test/resources/sql-tests/inputs/literals.sql b/sql/core/src/test/resources/sql-tests/inputs/literals.sql index 62f0d3d0599c6..e786775d0b63f 100644 --- a/sql/core/src/test/resources/sql-tests/inputs/literals.sql +++ b/sql/core/src/test/resources/sql-tests/inputs/literals.sql @@ -27,6 +27,12 @@ select 9223372036854775807L, -9223372036854775808L; -- out of range long select 9223372036854775808L; +-- big decimal parsing +select 90912830918230182310293801923652346786BD, 123.0E-28BD, 123.08BD; + +-- out of range big decimal +select 1.20E-38BD; + -- integral parsing -- parse int diff --git a/sql/core/src/test/resources/sql-tests/results/literals.sql.out b/sql/core/src/test/resources/sql-tests/results/literals.sql.out index 67e6d78dfbf24..f9d67af1d173d 100644 --- a/sql/core/src/test/resources/sql-tests/results/literals.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/literals.sql.out @@ -1,5 +1,5 @@ -- Automatically generated by SQLQueryTestSuite --- Number of queries: 38 +-- Number of queries: 40 -- !query 0 @@ -109,42 +109,64 @@ select 9223372036854775808L -- !query 11 -select 1, -1 +select 90912830918230182310293801923652346786BD, 123.0E-28BD, 123.08BD -- !query 11 schema -struct<1:int,-1:int> +struct<90912830918230182310293801923652346786:decimal(38,0),1.230E-26:decimal(29,29),123.08:decimal(5,2)> -- !query 11 output -1 -1 +90912830918230182310293801923652346786 0.0000000000000000000000000123 123.08 -- !query 12 -select 2147483647, -2147483648 +select 1.20E-38BD -- !query 12 schema -struct<2147483647:int,-2147483648:int> +struct<> -- !query 12 output -2147483647 -2147483648 +org.apache.spark.sql.catalyst.parser.ParseException + +DecimalType can only support precision up to 38(line 1, pos 7) + +== SQL == +select 1.20E-38BD +-------^^^ -- !query 13 -select 9223372036854775807, -9223372036854775808 +select 1, -1 -- !query 13 schema -struct<9223372036854775807:bigint,-9223372036854775808:bigint> +struct<1:int,-1:int> -- !query 13 output -9223372036854775807 -9223372036854775808 +1 -1 -- !query 14 -select 9223372036854775808, -9223372036854775809 +select 2147483647, 
-2147483648 -- !query 14 schema -struct<9223372036854775808:decimal(19,0),-9223372036854775809:decimal(19,0)> +struct<2147483647:int,-2147483648:int> -- !query 14 output -9223372036854775808 -9223372036854775809 +2147483647 -2147483648 -- !query 15 -select 1234567890123456789012345678901234567890 +select 9223372036854775807, -9223372036854775808 -- !query 15 schema -struct<> +struct<9223372036854775807:bigint,-9223372036854775808:bigint> -- !query 15 output +9223372036854775807 -9223372036854775808 + + +-- !query 16 +select 9223372036854775808, -9223372036854775809 +-- !query 16 schema +struct<9223372036854775808:decimal(19,0),-9223372036854775809:decimal(19,0)> +-- !query 16 output +9223372036854775808 -9223372036854775809 + + +-- !query 17 +select 1234567890123456789012345678901234567890 +-- !query 17 schema +struct<> +-- !query 17 output org.apache.spark.sql.catalyst.parser.ParseException DecimalType can only support precision up to 38 @@ -152,11 +174,11 @@ DecimalType can only support precision up to 38 select 1234567890123456789012345678901234567890 --- !query 16 +-- !query 18 select 1234567890123456789012345678901234567890.0 --- !query 16 schema +-- !query 18 schema struct<> --- !query 16 output +-- !query 18 output org.apache.spark.sql.catalyst.parser.ParseException DecimalType can only support precision up to 38 @@ -164,27 +186,27 @@ DecimalType can only support precision up to 38 select 1234567890123456789012345678901234567890.0 --- !query 17 +-- !query 19 select 1D, 1.2D, 1e10, 1.5e5, .10D, 0.10D, .1e5, .9e+2, 0.9e+2, 900e-1, 9.e+1 --- !query 17 schema +-- !query 19 schema struct<1.0:double,1.2:double,1.0E10:double,150000.0:double,0.1:double,0.1:double,10000.0:double,90.0:double,90.0:double,90.0:double,90.0:double> --- !query 17 output +-- !query 19 output 1.0 1.2 1.0E10 150000.0 0.1 0.1 10000.0 90.0 90.0 90.0 90.0 --- !query 18 +-- !query 20 select -1D, -1.2D, -1e10, -1.5e5, -.10D, -0.10D, -.1e5 --- !query 18 schema +-- !query 20 schema struct<-1.0:double,-1.2:double,-1.0E10:double,-150000.0:double,-0.1:double,-0.1:double,-10000.0:double> --- !query 18 output +-- !query 20 output -1.0 -1.2 -1.0E10 -150000.0 -0.1 -0.1 -10000.0 --- !query 19 +-- !query 21 select .e3 --- !query 19 schema +-- !query 21 schema struct<> --- !query 19 output +-- !query 21 output org.apache.spark.sql.catalyst.parser.ParseException no viable alternative at input 'select .'(line 1, pos 7) @@ -194,145 +216,145 @@ select .e3 -------^^^ --- !query 20 +-- !query 22 select 1E309, -1E309 --- !query 20 schema +-- !query 22 schema struct --- !query 20 output +-- !query 22 output Infinity -Infinity --- !query 21 +-- !query 23 select 0.3, -0.8, .5, -.18, 0.1111, .1111 --- !query 21 schema +-- !query 23 schema struct<0.3:decimal(1,1),-0.8:decimal(1,1),0.5:decimal(1,1),-0.18:decimal(2,2),0.1111:decimal(4,4),0.1111:decimal(4,4)> --- !query 21 output +-- !query 23 output 0.3 -0.8 0.5 -0.18 0.1111 0.1111 --- !query 22 +-- !query 24 select 123456789012345678901234567890123456789e10, 123456789012345678901234567890123456789.1e10 --- !query 22 schema +-- !query 24 schema struct<1.2345678901234568E48:double,1.2345678901234568E48:double> --- !query 22 output +-- !query 24 output 1.2345678901234568E48 1.2345678901234568E48 --- !query 23 +-- !query 25 select "Hello Peter!", 'hello lee!' --- !query 23 schema +-- !query 25 schema struct --- !query 23 output +-- !query 25 output Hello Peter! hello lee! 
--- !query 24 +-- !query 26 select 'hello' 'world', 'hello' " " 'lee' --- !query 24 schema +-- !query 26 schema struct --- !query 24 output +-- !query 26 output helloworld hello lee --- !query 25 +-- !query 27 select "hello 'peter'" --- !query 25 schema +-- !query 27 schema struct --- !query 25 output +-- !query 27 output hello 'peter' --- !query 26 +-- !query 28 select 'pattern%', 'no-pattern\%', 'pattern\\%', 'pattern\\\%' --- !query 26 schema +-- !query 28 schema struct --- !query 26 output +-- !query 28 output pattern% no-pattern\% pattern\% pattern\\% --- !query 27 +-- !query 29 select '\'', '"', '\n', '\r', '\t', 'Z' --- !query 27 schema +-- !query 29 schema struct<':string,":string, :string, :string, :string,Z:string> --- !query 27 output +-- !query 29 output ' " Z --- !query 28 +-- !query 30 select '\110\145\154\154\157\041' --- !query 28 schema +-- !query 30 schema struct --- !query 28 output +-- !query 30 output Hello! --- !query 29 +-- !query 31 select '\u0057\u006F\u0072\u006C\u0064\u0020\u003A\u0029' --- !query 29 schema +-- !query 31 schema struct --- !query 29 output +-- !query 31 output World :) --- !query 30 +-- !query 32 select dAte '2016-03-12' --- !query 30 schema +-- !query 32 schema struct --- !query 30 output +-- !query 32 output 2016-03-12 --- !query 31 +-- !query 33 select date 'mar 11 2016' --- !query 31 schema +-- !query 33 schema struct<> --- !query 31 output +-- !query 33 output java.lang.IllegalArgumentException null --- !query 32 +-- !query 34 select tImEstAmp '2016-03-11 20:54:00.000' --- !query 32 schema +-- !query 34 schema struct --- !query 32 output +-- !query 34 output 2016-03-11 20:54:00 --- !query 33 +-- !query 35 select timestamp '2016-33-11 20:54:00.000' --- !query 33 schema +-- !query 35 schema struct<> --- !query 33 output +-- !query 35 output java.lang.IllegalArgumentException Timestamp format must be yyyy-mm-dd hh:mm:ss[.fffffffff] --- !query 34 +-- !query 36 select interval 13.123456789 seconds, interval -13.123456789 second --- !query 34 schema +-- !query 36 schema struct<> --- !query 34 output +-- !query 36 output scala.MatchError (interval 13 seconds 123 milliseconds 456 microseconds,CalendarIntervalType) (of class scala.Tuple2) --- !query 35 +-- !query 37 select interval 1 year 2 month 3 week 4 day 5 hour 6 minute 7 seconds 8 millisecond, 9 microsecond --- !query 35 schema +-- !query 37 schema struct<> --- !query 35 output +-- !query 37 output scala.MatchError (interval 1 years 2 months 3 weeks 4 days 5 hours 6 minutes 7 seconds 8 milliseconds,CalendarIntervalType) (of class scala.Tuple2) --- !query 36 +-- !query 38 select interval 10 nanoseconds --- !query 36 schema +-- !query 38 schema struct<> --- !query 36 output +-- !query 38 output org.apache.spark.sql.catalyst.parser.ParseException No interval can be constructed(line 1, pos 16) @@ -342,11 +364,11 @@ select interval 10 nanoseconds ----------------^^^ --- !query 37 +-- !query 39 select GEO '(10,-6)' --- !query 37 schema +-- !query 39 schema struct<> --- !query 37 output +-- !query 39 output org.apache.spark.sql.catalyst.parser.ParseException Literals of type 'GEO' are currently not supported.(line 1, pos 7) diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/catalyst/ExpressionSQLBuilderSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/catalyst/ExpressionSQLBuilderSuite.scala index 86724cbb676c4..43a218b4d14b3 100644 --- a/sql/hive/src/test/scala/org/apache/spark/sql/catalyst/ExpressionSQLBuilderSuite.scala +++ 
b/sql/hive/src/test/scala/org/apache/spark/sql/catalyst/ExpressionSQLBuilderSuite.scala @@ -39,6 +39,7 @@ class ExpressionSQLBuilderSuite extends SQLBuilderTest { checkSQL(Literal(Double.PositiveInfinity), "CAST('Infinity' AS DOUBLE)") checkSQL(Literal(Double.NegativeInfinity), "CAST('-Infinity' AS DOUBLE)") checkSQL(Literal(Double.NaN), "CAST('NaN' AS DOUBLE)") + checkSQL(Literal(BigDecimal("10.0000000").underlying), "10.0000000BD") checkSQL( Literal(Timestamp.valueOf("2016-01-01 00:00:00")), "TIMESTAMP('2016-01-01 00:00:00.0')") // TODO tests for decimals From e889a25b165be7fa8afdd105866e4b305016b4c9 Mon Sep 17 00:00:00 2001 From: Herman van Hovell Date: Fri, 26 Aug 2016 10:33:41 +0200 Subject: [PATCH 2/2] Move test around to reduce size of the diff --- .../resources/sql-tests/inputs/literals.sql | 12 +- .../sql-tests/results/literals.sql.out | 204 +++++++++--------- 2 files changed, 108 insertions(+), 108 deletions(-) diff --git a/sql/core/src/test/resources/sql-tests/inputs/literals.sql b/sql/core/src/test/resources/sql-tests/inputs/literals.sql index e786775d0b63f..a532a598c6bf9 100644 --- a/sql/core/src/test/resources/sql-tests/inputs/literals.sql +++ b/sql/core/src/test/resources/sql-tests/inputs/literals.sql @@ -27,12 +27,6 @@ select 9223372036854775807L, -9223372036854775808L; -- out of range long select 9223372036854775808L; --- big decimal parsing -select 90912830918230182310293801923652346786BD, 123.0E-28BD, 123.08BD; - --- out of range big decimal -select 1.20E-38BD; - -- integral parsing -- parse int @@ -96,3 +90,9 @@ select interval 10 nanoseconds; -- unsupported data type select GEO '(10,-6)'; + +-- big decimal parsing +select 90912830918230182310293801923652346786BD, 123.0E-28BD, 123.08BD; + +-- out of range big decimal +select 1.20E-38BD; diff --git a/sql/core/src/test/resources/sql-tests/results/literals.sql.out b/sql/core/src/test/resources/sql-tests/results/literals.sql.out index f9d67af1d173d..85629f7ba813a 100644 --- a/sql/core/src/test/resources/sql-tests/results/literals.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/literals.sql.out @@ -109,64 +109,42 @@ select 9223372036854775808L -- !query 11 -select 90912830918230182310293801923652346786BD, 123.0E-28BD, 123.08BD --- !query 11 schema -struct<90912830918230182310293801923652346786:decimal(38,0),1.230E-26:decimal(29,29),123.08:decimal(5,2)> --- !query 11 output -90912830918230182310293801923652346786 0.0000000000000000000000000123 123.08 - - --- !query 12 -select 1.20E-38BD --- !query 12 schema -struct<> --- !query 12 output -org.apache.spark.sql.catalyst.parser.ParseException - -DecimalType can only support precision up to 38(line 1, pos 7) - -== SQL == -select 1.20E-38BD --------^^^ - - --- !query 13 select 1, -1 --- !query 13 schema +-- !query 11 schema struct<1:int,-1:int> --- !query 13 output +-- !query 11 output 1 -1 --- !query 14 +-- !query 12 select 2147483647, -2147483648 --- !query 14 schema +-- !query 12 schema struct<2147483647:int,-2147483648:int> --- !query 14 output +-- !query 12 output 2147483647 -2147483648 --- !query 15 +-- !query 13 select 9223372036854775807, -9223372036854775808 --- !query 15 schema +-- !query 13 schema struct<9223372036854775807:bigint,-9223372036854775808:bigint> --- !query 15 output +-- !query 13 output 9223372036854775807 -9223372036854775808 --- !query 16 +-- !query 14 select 9223372036854775808, -9223372036854775809 --- !query 16 schema +-- !query 14 schema struct<9223372036854775808:decimal(19,0),-9223372036854775809:decimal(19,0)> --- !query 16 output +-- 
!query 14 output 9223372036854775808 -9223372036854775809 --- !query 17 +-- !query 15 select 1234567890123456789012345678901234567890 --- !query 17 schema +-- !query 15 schema struct<> --- !query 17 output +-- !query 15 output org.apache.spark.sql.catalyst.parser.ParseException DecimalType can only support precision up to 38 @@ -174,11 +152,11 @@ DecimalType can only support precision up to 38 select 1234567890123456789012345678901234567890 --- !query 18 +-- !query 16 select 1234567890123456789012345678901234567890.0 --- !query 18 schema +-- !query 16 schema struct<> --- !query 18 output +-- !query 16 output org.apache.spark.sql.catalyst.parser.ParseException DecimalType can only support precision up to 38 @@ -186,27 +164,27 @@ DecimalType can only support precision up to 38 select 1234567890123456789012345678901234567890.0 --- !query 19 +-- !query 17 select 1D, 1.2D, 1e10, 1.5e5, .10D, 0.10D, .1e5, .9e+2, 0.9e+2, 900e-1, 9.e+1 --- !query 19 schema +-- !query 17 schema struct<1.0:double,1.2:double,1.0E10:double,150000.0:double,0.1:double,0.1:double,10000.0:double,90.0:double,90.0:double,90.0:double,90.0:double> --- !query 19 output +-- !query 17 output 1.0 1.2 1.0E10 150000.0 0.1 0.1 10000.0 90.0 90.0 90.0 90.0 --- !query 20 +-- !query 18 select -1D, -1.2D, -1e10, -1.5e5, -.10D, -0.10D, -.1e5 --- !query 20 schema +-- !query 18 schema struct<-1.0:double,-1.2:double,-1.0E10:double,-150000.0:double,-0.1:double,-0.1:double,-10000.0:double> --- !query 20 output +-- !query 18 output -1.0 -1.2 -1.0E10 -150000.0 -0.1 -0.1 -10000.0 --- !query 21 +-- !query 19 select .e3 --- !query 21 schema +-- !query 19 schema struct<> --- !query 21 output +-- !query 19 output org.apache.spark.sql.catalyst.parser.ParseException no viable alternative at input 'select .'(line 1, pos 7) @@ -216,145 +194,145 @@ select .e3 -------^^^ --- !query 22 +-- !query 20 select 1E309, -1E309 --- !query 22 schema +-- !query 20 schema struct --- !query 22 output +-- !query 20 output Infinity -Infinity --- !query 23 +-- !query 21 select 0.3, -0.8, .5, -.18, 0.1111, .1111 --- !query 23 schema +-- !query 21 schema struct<0.3:decimal(1,1),-0.8:decimal(1,1),0.5:decimal(1,1),-0.18:decimal(2,2),0.1111:decimal(4,4),0.1111:decimal(4,4)> --- !query 23 output +-- !query 21 output 0.3 -0.8 0.5 -0.18 0.1111 0.1111 --- !query 24 +-- !query 22 select 123456789012345678901234567890123456789e10, 123456789012345678901234567890123456789.1e10 --- !query 24 schema +-- !query 22 schema struct<1.2345678901234568E48:double,1.2345678901234568E48:double> --- !query 24 output +-- !query 22 output 1.2345678901234568E48 1.2345678901234568E48 --- !query 25 +-- !query 23 select "Hello Peter!", 'hello lee!' --- !query 25 schema +-- !query 23 schema struct --- !query 25 output +-- !query 23 output Hello Peter! hello lee! 
--- !query 26 +-- !query 24 select 'hello' 'world', 'hello' " " 'lee' --- !query 26 schema +-- !query 24 schema struct --- !query 26 output +-- !query 24 output helloworld hello lee --- !query 27 +-- !query 25 select "hello 'peter'" --- !query 27 schema +-- !query 25 schema struct --- !query 27 output +-- !query 25 output hello 'peter' --- !query 28 +-- !query 26 select 'pattern%', 'no-pattern\%', 'pattern\\%', 'pattern\\\%' --- !query 28 schema +-- !query 26 schema struct --- !query 28 output +-- !query 26 output pattern% no-pattern\% pattern\% pattern\\% --- !query 29 +-- !query 27 select '\'', '"', '\n', '\r', '\t', 'Z' --- !query 29 schema +-- !query 27 schema struct<':string,":string, :string, :string, :string,Z:string> --- !query 29 output +-- !query 27 output ' " Z --- !query 30 +-- !query 28 select '\110\145\154\154\157\041' --- !query 30 schema +-- !query 28 schema struct --- !query 30 output +-- !query 28 output Hello! --- !query 31 +-- !query 29 select '\u0057\u006F\u0072\u006C\u0064\u0020\u003A\u0029' --- !query 31 schema +-- !query 29 schema struct --- !query 31 output +-- !query 29 output World :) --- !query 32 +-- !query 30 select dAte '2016-03-12' --- !query 32 schema +-- !query 30 schema struct --- !query 32 output +-- !query 30 output 2016-03-12 --- !query 33 +-- !query 31 select date 'mar 11 2016' --- !query 33 schema +-- !query 31 schema struct<> --- !query 33 output +-- !query 31 output java.lang.IllegalArgumentException null --- !query 34 +-- !query 32 select tImEstAmp '2016-03-11 20:54:00.000' --- !query 34 schema +-- !query 32 schema struct --- !query 34 output +-- !query 32 output 2016-03-11 20:54:00 --- !query 35 +-- !query 33 select timestamp '2016-33-11 20:54:00.000' --- !query 35 schema +-- !query 33 schema struct<> --- !query 35 output +-- !query 33 output java.lang.IllegalArgumentException Timestamp format must be yyyy-mm-dd hh:mm:ss[.fffffffff] --- !query 36 +-- !query 34 select interval 13.123456789 seconds, interval -13.123456789 second --- !query 36 schema +-- !query 34 schema struct<> --- !query 36 output +-- !query 34 output scala.MatchError (interval 13 seconds 123 milliseconds 456 microseconds,CalendarIntervalType) (of class scala.Tuple2) --- !query 37 +-- !query 35 select interval 1 year 2 month 3 week 4 day 5 hour 6 minute 7 seconds 8 millisecond, 9 microsecond --- !query 37 schema +-- !query 35 schema struct<> --- !query 37 output +-- !query 35 output scala.MatchError (interval 1 years 2 months 3 weeks 4 days 5 hours 6 minutes 7 seconds 8 milliseconds,CalendarIntervalType) (of class scala.Tuple2) --- !query 38 +-- !query 36 select interval 10 nanoseconds --- !query 38 schema +-- !query 36 schema struct<> --- !query 38 output +-- !query 36 output org.apache.spark.sql.catalyst.parser.ParseException No interval can be constructed(line 1, pos 16) @@ -364,11 +342,11 @@ select interval 10 nanoseconds ----------------^^^ --- !query 39 +-- !query 37 select GEO '(10,-6)' --- !query 39 schema +-- !query 37 schema struct<> --- !query 39 output +-- !query 37 output org.apache.spark.sql.catalyst.parser.ParseException Literals of type 'GEO' are currently not supported.(line 1, pos 7) @@ -376,3 +354,25 @@ Literals of type 'GEO' are currently not supported.(line 1, pos 7) == SQL == select GEO '(10,-6)' -------^^^ + + +-- !query 38 +select 90912830918230182310293801923652346786BD, 123.0E-28BD, 123.08BD +-- !query 38 schema +struct<90912830918230182310293801923652346786:decimal(38,0),1.230E-26:decimal(29,29),123.08:decimal(5,2)> +-- !query 38 output 
+90912830918230182310293801923652346786 0.0000000000000000000000000123 123.08 + + +-- !query 39 +select 1.20E-38BD +-- !query 39 schema +struct<> +-- !query 39 output +org.apache.spark.sql.catalyst.parser.ParseException + +DecimalType can only support precision up to 38(line 1, pos 7) + +== SQL == +select 1.20E-38BD +-------^^^
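

A note for reviewers: the following self-contained Scala sketch mirrors, outside the parser, what the new `visitBigDecimalLiteral` rule does — strip the trailing `BD`, build a `java.math.BigDecimal`, and reject values whose derived precision exceeds the 38-digit `DecimalType` limit. It is only an illustration of the behaviour exercised by the golden files above; the object and method names (`BigDecimalLiteralSketch`, `parseBD`, `MaxPrecision`) are invented for this example and are not part of the patch, and the real implementation in `AstBuilder.scala` reports the failure through `ParseException` rather than `IllegalArgumentException`.

```scala
import java.math.{BigDecimal => JBigDecimal}

object BigDecimalLiteralSketch {
  // Spark's DecimalType supports at most 38 digits of precision.
  val MaxPrecision = 38

  /** Turn a literal such as "123.08BD" into a java.math.BigDecimal,
   *  mimicking the precision check that Literal/DecimalType perform. */
  def parseBD(literal: String): JBigDecimal = {
    require(literal.endsWith("BD"), s"not a BigDecimal literal: $literal")
    val raw = literal.dropRight(2)            // strip the 'BD' suffix
    val value = new JBigDecimal(raw)
    // Spark derives DecimalType(max(precision, scale), scale) from the value,
    // so a tiny number with a large scale (e.g. 1.20E-38) can still overflow.
    val precision = math.max(value.precision, value.scale)
    if (precision > MaxPrecision) {
      throw new IllegalArgumentException(
        s"DecimalType can only support precision up to $MaxPrecision")
    }
    value
  }

  def main(args: Array[String]): Unit = {
    println(parseBD("90912830918230182310293801923652346786BD")) // decimal(38,0)
    println(parseBD("123.0E-28BD"))                               // 1.230E-26 -> decimal(29,29)
    println(parseBD("123.08BD"))                                  // decimal(5,2)
    parseBD("1.20E-38BD")                                         // fails: scale 40 exceeds 38
  }
}
```

The three accepted literals and the rejected `1.20E-38BD` are exactly the cases covered by queries 38 and 39 in the regenerated `literals.sql.out`.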