From d2e931841a81d676297e362f8f9423869ad078c2 Mon Sep 17 00:00:00 2001
From: Max Gekk <max.gekk@gmail.com>
Date: Tue, 25 Mar 2025 13:13:42 +0300
Subject: [PATCH] Parse time as TimeType

---
 .../org/apache/spark/sql/types/DataType.scala |  5 ++--
 .../spark/sql/types/DataTypeSuite.scala       | 23 ++++++++++++++++++-
 2 files changed, 25 insertions(+), 3 deletions(-)

diff --git a/sql/api/src/main/scala/org/apache/spark/sql/types/DataType.scala b/sql/api/src/main/scala/org/apache/spark/sql/types/DataType.scala
index db7e7c0ae1885..f798276d60f7c 100644
--- a/sql/api/src/main/scala/org/apache/spark/sql/types/DataType.scala
+++ b/sql/api/src/main/scala/org/apache/spark/sql/types/DataType.scala
@@ -174,7 +174,7 @@ object DataType {
   def fromJson(json: String): DataType = parseDataType(parse(json))
 
   private val otherTypes = {
-    Seq(
+    (Seq(
       NullType,
       DateType,
       TimestampType,
@@ -202,7 +202,8 @@ object DataType {
       YearMonthIntervalType(MONTH),
       YearMonthIntervalType(YEAR, MONTH),
       TimestampNTZType,
-      VariantType)
+      VariantType) ++
+      (TimeType.MIN_PRECISION to TimeType.MAX_PRECISION).map(TimeType(_)))
       .map(t => t.typeName -> t)
       .toMap
   }
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeSuite.scala
index 53967a435a762..115d561cbe7b8 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeSuite.scala
@@ -22,7 +22,7 @@ import org.json4s.jackson.JsonMethods
 
 import org.apache.spark.{SparkException, SparkFunSuite, SparkIllegalArgumentException}
 import org.apache.spark.sql.catalyst.analysis.{caseInsensitiveResolution, caseSensitiveResolution}
-import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
+import org.apache.spark.sql.catalyst.parser.{CatalystSqlParser, ParseException}
 import org.apache.spark.sql.catalyst.types.DataTypeUtils
 import org.apache.spark.sql.catalyst.util.{CollationFactory, StringConcat}
 import org.apache.spark.sql.types.DataTypeTestUtils.{dayTimeIntervalTypes, yearMonthIntervalTypes}
@@ -1393,4 +1393,25 @@ class DataTypeSuite extends SparkFunSuite {
       )
     }
   }
+
+  test("Parse time(n) as TimeType(n)") {
+    0 to 6 foreach { n =>
+      assert(DataType.fromJson(s"\"time($n)\"") == TimeType(n))
+      val expectedStructType = StructType(Seq(StructField("t", TimeType(n))))
+      assert(DataType.fromDDL(s"t time($n)") == expectedStructType)
+    }
+
+    checkError(
+      exception = intercept[SparkIllegalArgumentException] {
+        DataType.fromJson("\"time(9)\"")
+      },
+      condition = "INVALID_JSON_DATA_TYPE",
+      parameters = Map("invalidType" -> "time(9)"))
+    checkError(
+      exception = intercept[ParseException] {
+        DataType.fromDDL("t time(-1)")
+      },
+      condition = "PARSE_SYNTAX_ERROR",
+      parameters = Map("error" -> "'time'", "hint" -> ""))
+  }
 }
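
Illustration (not part of the patch): a minimal sketch of the behaviour this change enables, assuming a Spark build where TimeType is on the classpath and, as the test above implies, MIN_PRECISION is 0 and MAX_PRECISION is 6. The values time(3) and time(6) are arbitrary examples, not taken from the diff.

    import org.apache.spark.sql.types.{DataType, StructField, StructType, TimeType}

    // JSON schema strings: "time(n)" is now found in the otherTypes map
    // and resolves to TimeType(n) instead of raising INVALID_JSON_DATA_TYPE.
    val t3 = DataType.fromJson("\"time(3)\"")
    assert(t3 == TimeType(3))

    // DDL strings: a column declared as time(n) parses to the same type.
    val schema = DataType.fromDDL("t time(6)")
    assert(schema == StructType(Seq(StructField("t", TimeType(6)))))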