Commit ec99c46

Fix review comments 2
1 parent b98a216 commit ec99c46

4 files changed: 60 additions & 113 deletions

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/timeExpressions.scala

Lines changed: 40 additions & 72 deletions
@@ -750,6 +750,21 @@ case class TimeTrunc(unit: Expression, time: Expression)
   }
 }
 
+abstract class TimeFromBase extends UnaryExpression with RuntimeReplaceable with ExpectsInputTypes {
+  protected def timeConversionMethod: String
+
+  override def inputTypes: Seq[AbstractDataType] = Seq(IntegralType)
+  override def dataType: DataType = TimeType(TimeType.MICROS_PRECISION)
+
+  override def replacement: Expression = StaticInvoke(
+    classOf[DateTimeUtils.type],
+    dataType,
+    timeConversionMethod,
+    Seq(child),
+    Seq(child.dataType)
+  )
+}
+
 // scalastyle:off line.size.limit
 @ExpressionDescription(
   usage = "_FUNC_(seconds) - Creates a TIME value from seconds since midnight.",
@@ -772,20 +787,10 @@ case class TimeTrunc(unit: Expression, time: Expression)
   since = "4.2.0",
   group = "datetime_funcs")
 // scalastyle:on line.size.limit
-case class TimeFromSeconds(child: Expression)
-  extends UnaryExpression with RuntimeReplaceable with ExpectsInputTypes {
-
-  override def replacement: Expression = StaticInvoke(
-    classOf[DateTimeUtils.type],
-    TimeType(TimeType.MICROS_PRECISION),
-    "timeFromSeconds",
-    Seq(child),
-    Seq(child.dataType)
-  )
-
+case class TimeFromSeconds(child: Expression) extends TimeFromBase {
   override def inputTypes: Seq[AbstractDataType] = Seq(NumericType)
-  override def dataType: DataType = TimeType(TimeType.MICROS_PRECISION)
   override def prettyName: String = "time_from_seconds"
+  override protected def timeConversionMethod: String = "timeFromSeconds"
 
   override protected def withNewChildInternal(newChild: Expression): TimeFromSeconds =
     copy(child = newChild)
@@ -812,20 +817,9 @@ case class TimeFromSeconds(child: Expression)
   since = "4.2.0",
   group = "datetime_funcs")
 // scalastyle:on line.size.limit
-case class TimeFromMillis(child: Expression)
-  extends UnaryExpression with RuntimeReplaceable with ExpectsInputTypes {
-
-  override def replacement: Expression = StaticInvoke(
-    classOf[DateTimeUtils.type],
-    TimeType(TimeType.MICROS_PRECISION),
-    "timeFromMillis",
-    Seq(child),
-    Seq(child.dataType)
-  )
-
-  override def inputTypes: Seq[AbstractDataType] = Seq(IntegralType)
-  override def dataType: DataType = TimeType(TimeType.MICROS_PRECISION)
+case class TimeFromMillis(child: Expression) extends TimeFromBase {
   override def prettyName: String = "time_from_millis"
+  override protected def timeConversionMethod: String = "timeFromMillis"
 
   override protected def withNewChildInternal(newChild: Expression): TimeFromMillis =
     copy(child = newChild)
@@ -852,23 +846,27 @@ case class TimeFromMillis(child: Expression)
   since = "4.2.0",
   group = "datetime_funcs")
 // scalastyle:on line.size.limit
-case class TimeFromMicros(child: Expression)
-  extends UnaryExpression with RuntimeReplaceable with ExpectsInputTypes {
+case class TimeFromMicros(child: Expression) extends TimeFromBase {
+  override def prettyName: String = "time_from_micros"
+  override protected def timeConversionMethod: String = "timeFromMicros"
+
+  override protected def withNewChildInternal(newChild: Expression): TimeFromMicros =
+    copy(child = newChild)
+}
+
+abstract class TimeToBase extends UnaryExpression with RuntimeReplaceable with ExpectsInputTypes {
+  protected def timeConversionMethod: String
+
+  override def inputTypes: Seq[AbstractDataType] = Seq(AnyTimeType)
+  override def dataType: DataType = LongType
 
   override def replacement: Expression = StaticInvoke(
     classOf[DateTimeUtils.type],
-    TimeType(TimeType.MICROS_PRECISION),
-    "timeFromMicros",
+    dataType,
+    timeConversionMethod,
     Seq(child),
     Seq(child.dataType)
   )
-
-  override def inputTypes: Seq[AbstractDataType] = Seq(IntegralType)
-  override def dataType: DataType = TimeType(TimeType.MICROS_PRECISION)
-  override def prettyName: String = "time_from_micros"
-
-  override protected def withNewChildInternal(newChild: Expression): TimeFromMicros =
-    copy(child = newChild)
 }
 
 // scalastyle:off line.size.limit
@@ -894,19 +892,11 @@ case class TimeFromMicros(child: Expression)
   group = "datetime_funcs")
 // scalastyle:on line.size.limit
 case class TimeToSeconds(child: Expression)
-  extends UnaryExpression with RuntimeReplaceable with ImplicitCastInputTypes {
+  extends TimeToBase with ImplicitCastInputTypes {
 
-  override def replacement: Expression = StaticInvoke(
-    classOf[DateTimeUtils.type],
-    DecimalType(14, 6),
-    "timeToSeconds",
-    Seq(child),
-    Seq(child.dataType)
-  )
-
-  override def inputTypes: Seq[AbstractDataType] = Seq(AnyTimeType)
   override def dataType: DataType = DecimalType(14, 6)
   override def prettyName: String = "time_to_seconds"
+  override protected def timeConversionMethod: String = "timeToSeconds"
 
   override protected def withNewChildInternal(newChild: Expression): TimeToSeconds =
     copy(child = newChild)
@@ -934,20 +924,9 @@ case class TimeToSeconds(child: Expression)
   since = "4.2.0",
   group = "datetime_funcs")
 // scalastyle:on line.size.limit
-case class TimeToMillis(child: Expression)
-  extends UnaryExpression with RuntimeReplaceable with ExpectsInputTypes {
-
-  override def replacement: Expression = StaticInvoke(
-    classOf[DateTimeUtils.type],
-    LongType,
-    "timeToMillis",
-    Seq(child),
-    Seq(child.dataType)
-  )
-
-  override def inputTypes: Seq[AbstractDataType] = Seq(AnyTimeType)
-  override def dataType: DataType = LongType
+case class TimeToMillis(child: Expression) extends TimeToBase {
   override def prettyName: String = "time_to_millis"
+  override protected def timeConversionMethod: String = "timeToMillis"
 
   override protected def withNewChildInternal(newChild: Expression): TimeToMillis =
     copy(child = newChild)
@@ -975,20 +954,9 @@ case class TimeToMillis(child: Expression)
   since = "4.2.0",
   group = "datetime_funcs")
 // scalastyle:on line.size.limit
-case class TimeToMicros(child: Expression)
-  extends UnaryExpression with RuntimeReplaceable with ExpectsInputTypes {
-
-  override def replacement: Expression = StaticInvoke(
-    classOf[DateTimeUtils.type],
-    LongType,
-    "timeToMicros",
-    Seq(child),
-    Seq(child.dataType)
-  )
-
-  override def inputTypes: Seq[AbstractDataType] = Seq(AnyTimeType)
-  override def dataType: DataType = LongType
+case class TimeToMicros(child: Expression) extends TimeToBase {
   override def prettyName: String = "time_to_micros"
+  override protected def timeConversionMethod: String = "timeToMicros"
 
   override protected def withNewChildInternal(newChild: Expression): TimeToMicros =
     copy(child = newChild)
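
The refactor above is a template-method extraction: the shared StaticInvoke plumbing moves into TimeFromBase/TimeToBase, and each concrete expression only names the DateTimeUtils method it delegates to. Below is a minimal standalone sketch of that shape in plain Scala, not Spark code; the class names and the Long-based eval are illustrative assumptions only.

// Sketch only: mirrors the TimeFromBase idea without Spark's Expression/StaticInvoke machinery.
object TimeFromBaseSketch {
  final val NanosPerSecond = 1000000000L
  final val NanosPerMilli = 1000000L

  // The base class fixes the common behavior; each subclass supplies one piece.
  abstract class TimeFromBase {
    protected def timeConversion: Long => Long // analogous to timeConversionMethod
    def eval(input: Long): Long = timeConversion(input)
  }

  case class TimeFromSecondsLike() extends TimeFromBase {
    override protected def timeConversion: Long => Long = _ * NanosPerSecond
  }

  case class TimeFromMillisLike() extends TimeFromBase {
    override protected def timeConversion: Long => Long = _ * NanosPerMilli
  }

  def main(args: Array[String]): Unit = {
    println(TimeFromSecondsLike().eval(43200L))   // 43200000000000 nanos (12:00:00)
    println(TimeFromMillisLike().eval(86399999L)) // 86399999000000 nanos (23:59:59.999)
  }
}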

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala

Lines changed: 5 additions & 20 deletions
@@ -908,10 +908,10 @@ object DateTimeUtils extends SparkDateTimeUtils {
       nanos
     } catch {
       case e: DateTimeException =>
-        throw QueryExecutionErrors.ansiDateTimeArgumentOutOfRange(e)
+        throw QueryExecutionErrors.ansiDateTimeArgumentOutOfRangeWithoutSuggestion(e)
       case e: ArithmeticException =>
-        val wrapped = new DateTimeException(s"Overflow in TIME conversion: ${e.getMessage}", e)
-        throw QueryExecutionErrors.ansiDateTimeArgumentOutOfRange(wrapped)
+        throw QueryExecutionErrors.ansiDateTimeArgumentOutOfRangeWithoutSuggestion(
+          new DateTimeException("Overflow in TIME conversion", e))
     }
   }
 
@@ -935,19 +935,7 @@ object DateTimeUtils extends SparkDateTimeUtils {
   }
 
   /**
-   * Creates a TIME value from seconds since midnight (float type).
-   * @param seconds Seconds (0 to 86399.999999)
-   * @return Nanoseconds since midnight
-   */
-  def timeFromSeconds(seconds: Float): Long = withTimeConversionErrorHandling {
-    if (seconds.isNaN || seconds.isInfinite) {
-      throw new DateTimeException("Cannot convert NaN or Infinite value to TIME")
-    }
-    (seconds.toDouble * NANOS_PER_SECOND).toLong
-  }
-
-  /**
-   * Creates a TIME value from seconds since midnight (double type).
+   * Creates a TIME value from seconds since midnight (floating point type).
    * @param seconds Seconds (0 to 86399.999999)
    * @return Nanoseconds since midnight
    */
@@ -983,10 +971,7 @@ object DateTimeUtils extends SparkDateTimeUtils {
    */
   def timeToSeconds(nanos: Long): Decimal = {
     val result = Decimal(nanos) / Decimal(NANOS_PER_SECOND)
-    if (!result.changePrecision(14, 6)) {
-      throw new DateTimeException(
-        "TIME to seconds conversion resulted in value that cannot fit in Decimal(14, 6)")
-    }
+    result.changePrecision(14, 6)
     result
   }
 
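
The timeToSeconds change drops the overflow guard because a TIME value never exceeds 86399.999999999 seconds, which always fits DECIMAL(14, 6) after rounding to six fractional digits. Here is a standalone sketch of the same arithmetic using java.math.BigDecimal rather than Spark's Decimal; HALF_UP rounding is an assumption intended to approximate changePrecision.

import java.math.{BigDecimal => JBigDecimal, RoundingMode}

object TimeToSecondsSketch {
  private val NanosPerSecond = JBigDecimal.valueOf(1000000000L)

  // Nanoseconds since midnight -> seconds as a decimal with 6 fractional digits.
  def timeToSeconds(nanos: Long): JBigDecimal =
    JBigDecimal.valueOf(nanos).divide(NanosPerSecond).setScale(6, RoundingMode.HALF_UP)

  def main(args: Array[String]): Unit = {
    println(timeToSeconds(43200L * 1000000000L)) // 43200.000000 (12:00:00)
    println(timeToSeconds(52200500000000L))      // 52200.500000 (14:30:00.5)
    // Max TIME is 86399999999999 nanos = 86399.999999999 s, well within DECIMAL(14, 6).
  }
}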

sql/core/src/test/resources/sql-tests/inputs/time.sql

Lines changed: 8 additions & 7 deletions
@@ -304,14 +304,15 @@ SELECT '00:00:00.1234' :: TIME(4) - TIME'23:59:59';
 
 -- Numeric constructor and extractor functions for TIME type
 
+
 -- time_from_seconds (valid: 0 to 86399.999999)
 SELECT time_from_seconds(0);
 SELECT time_from_seconds(43200);
 SELECT time_from_seconds(52200.5);
 SELECT time_from_seconds(86399.999999);
-SELECT time_from_seconds(-1); -- invalid: negative exception
-SELECT time_from_seconds(86400); -- invalid: >= 86400 exception
-SELECT time_from_seconds(90000); -- invalid: >= 86400 exception
+SELECT time_from_seconds(-1); -- invalid: negative -> exception
+SELECT time_from_seconds(86400); -- invalid: >= 86400 -> exception
+SELECT time_from_seconds(90000); -- invalid: >= 86400 -> exception
 SELECT time_from_seconds(NULL);
 
 -- time_from_millis (valid: 0 to 86399999)
@@ -320,8 +321,8 @@ SELECT time_from_millis(43200);
 SELECT time_from_millis(52200000);
 SELECT time_from_millis(52200500);
 SELECT time_from_millis(86399999);
-SELECT time_from_millis(-1); -- invalid: negative exception
-SELECT time_from_millis(86400000); -- invalid: >= 86400000 exception
+SELECT time_from_millis(-1); -- invalid: negative -> exception
+SELECT time_from_millis(86400000); -- invalid: >= 86400000 -> exception
 SELECT time_from_millis(NULL);
 
 -- time_from_micros (valid: 0 to 86399999999)
@@ -330,8 +331,8 @@ SELECT time_from_micros(43200);
 SELECT time_from_micros(52200000000);
 SELECT time_from_micros(52200500000);
 SELECT time_from_micros(86399999999);
-SELECT time_from_micros(-1); -- invalid: negative exception
-SELECT time_from_micros(86400000000); -- invalid: >= 86400000000 exception
+SELECT time_from_micros(-1); -- invalid: negative -> exception
+SELECT time_from_micros(86400000000); -- invalid: >= 86400000000 -> exception
 SELECT time_from_micros(NULL);
 
 -- time_to_seconds
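
The "invalid -> exception" cases above all trip the same bound: a TIME value must fall in [00:00:00, 24:00:00), i.e. [0, 86400000000000) nanoseconds, which is what the rangeMessage in the expected output below reports. A standalone sketch of that check follows; the helper name is hypothetical and the sketch does not reproduce Spark's actual validation code.

import java.time.DateTimeException

object TimeRangeSketch {
  final val NanosPerDay = 86400L * 1000000000L

  // Hypothetical helper: mirrors the bound reported by the DATETIME_FIELD_OUT_OF_BOUNDS errors.
  def requireValidTimeNanos(nanos: Long): Long = {
    if (nanos < 0L || nanos >= NanosPerDay) {
      throw new DateTimeException(
        "Invalid TIME value: must be between 00:00:00 and 23:59:59.999999999, " +
          s"but got $nanos nanoseconds")
    }
    nanos
  }

  def main(args: Array[String]): Unit = {
    println(requireValidTimeNanos(43200L * 1000000000L)) // ok: 12:00:00
    // requireValidTimeNanos(-1000000000L) would throw, like time_from_seconds(-1) above.
  }
}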

sql/core/src/test/resources/sql-tests/results/time.sql.out

Lines changed: 7 additions & 14 deletions
@@ -2418,10 +2418,9 @@ struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS.WITH_SUGGESTION",
+  "errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS.WITHOUT_SUGGESTION",
   "sqlState" : "22023",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "rangeMessage" : "Invalid TIME value: must be between 00:00:00 and 23:59:59.999999999, but got -1000000000 nanoseconds"
   }
 }
@@ -2434,10 +2433,9 @@ struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS.WITH_SUGGESTION",
+  "errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS.WITHOUT_SUGGESTION",
   "sqlState" : "22023",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "rangeMessage" : "Invalid TIME value: must be between 00:00:00 and 23:59:59.999999999, but got 86400000000000 nanoseconds"
   }
 }
@@ -2450,10 +2448,9 @@ struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS.WITH_SUGGESTION",
+  "errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS.WITHOUT_SUGGESTION",
   "sqlState" : "22023",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "rangeMessage" : "Invalid TIME value: must be between 00:00:00 and 23:59:59.999999999, but got 90000000000000 nanoseconds"
   }
 }
@@ -2514,10 +2511,9 @@ struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS.WITH_SUGGESTION",
+  "errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS.WITHOUT_SUGGESTION",
   "sqlState" : "22023",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "rangeMessage" : "Invalid TIME value: must be between 00:00:00 and 23:59:59.999999999, but got -1000000 nanoseconds"
   }
 }
@@ -2530,10 +2526,9 @@ struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS.WITH_SUGGESTION",
+  "errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS.WITHOUT_SUGGESTION",
   "sqlState" : "22023",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "rangeMessage" : "Invalid TIME value: must be between 00:00:00 and 23:59:59.999999999, but got 86400000000000 nanoseconds"
   }
 }
@@ -2594,10 +2589,9 @@ struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS.WITH_SUGGESTION",
+  "errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS.WITHOUT_SUGGESTION",
   "sqlState" : "22023",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "rangeMessage" : "Invalid TIME value: must be between 00:00:00 and 23:59:59.999999999, but got -1000 nanoseconds"
   }
 }
@@ -2610,10 +2604,9 @@ struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS.WITH_SUGGESTION",
+  "errorClass" : "DATETIME_FIELD_OUT_OF_BOUNDS.WITHOUT_SUGGESTION",
   "sqlState" : "22023",
   "messageParameters" : {
-    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "rangeMessage" : "Invalid TIME value: must be between 00:00:00 and 23:59:59.999999999, but got 86400000000000 nanoseconds"
   }
 }
