Fix the time zone check issue (#5767)
There are two time zone settings: the Spark session local time zone and the JVM's default time zone. They may differ from each other, as reported in issue #5678.

With this PR, the plugin requires both settings to be UTC in order to support the timestamp type.
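As a minimal sketch of the combined check (the object and method names below are illustrative and not part of this commit; the real logic lives in TypeChecks.areTimestampsSupported and RapidsMeta.checkTimeZoneId in the diffs that follow):

import java.time.ZoneId

import org.apache.spark.sql.catalyst.util.DateTimeUtils
import org.apache.spark.sql.internal.SQLConf

// Illustrative sketch: timestamp support now requires BOTH zones to be UTC.
object TimeZoneCheckSketch {
  // UTC constant, assumed to be equivalent to the plugin's GpuOverrides.UTC_TIMEZONE_ID.
  private val Utc = ZoneId.of("UTC").normalized()

  private def isUtc(zoneId: ZoneId): Boolean = zoneId.normalized() == Utc

  def timestampsSupported(): Boolean = {
    // JVM default time zone.
    val jvmZone = ZoneId.systemDefault()
    // Spark session local time zone (spark.sql.session.timeZone), parsed to a ZoneId.
    val sessionZone = DateTimeUtils.getZoneId(SQLConf.get.sessionLocalTimeZone)
    isUtc(jvmZone) && isUtc(sessionZone)
  }
}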

Signed-off-by: Firestarman <firestarmanllc@gmail.com>
firestarman authored Jun 14, 2022
1 parent a17e3e1 commit 04991c6
Showing 8 changed files with 34 additions and 33 deletions.
@@ -214,7 +214,6 @@ trait Spark320PlusShims extends SparkShims with RebaseShims with Logging {
case _: DayTimeIntervalType => // Supported
}
}
checkTimeZoneId(timeAdd.timeZoneId)
}

override def convertToGpu(lhs: Expression, rhs: Expression): GpuExpression =
@@ -162,7 +162,6 @@ trait Spark330PlusShims extends Spark321PlusShims with Spark320PlusNonDBShims {
case _: DayTimeIntervalType => // Supported
}
}
checkTimeZoneId(timeAdd.timeZoneId)
}

override def convertToGpu(lhs: Expression, rhs: Expression): GpuExpression =
@@ -167,9 +167,7 @@ object GpuCSVScan {
}

if (types.contains(TimestampType)) {
if (!TypeChecks.areTimestampsSupported(parsedOptions.zoneId)) {
meta.willNotWorkOnGpu("Only UTC zone id is supported")
}
meta.checkTimeZoneId(parsedOptions.zoneId)
GpuTextBasedDateUtils.tagCudfFormat(meta,
GpuCsvUtils.timestampFormatInRead(parsedOptions), parseString = true)
}
@@ -649,7 +649,9 @@ object GpuOverrides extends Logging {
case FloatType => true
case DoubleType => true
case DateType => true
case TimestampType => TypeChecks.areTimestampsSupported(ZoneId.systemDefault())
case TimestampType =>
TypeChecks.areTimestampsSupported(ZoneId.systemDefault()) &&
TypeChecks.areTimestampsSupported(SQLConf.get.sessionLocalTimeZone)
case StringType => true
case dt: DecimalType if allowDecimal => dt.precision <= DType.DECIMAL64_MAX_PRECISION
case NullType => allowNull
@@ -1702,7 +1704,6 @@ object GpuOverrides extends Logging {
willNotWorkOnGpu("interval months isn't supported")
}
}
checkTimeZoneId(timeAdd.timeZoneId)
}

override def convertToGpu(lhs: Expression, rhs: Expression): GpuExpression =
@@ -1724,7 +1725,6 @@ object GpuOverrides extends Logging {
willNotWorkOnGpu("interval months isn't supported")
}
}
checkTimeZoneId(dateAddInterval.timeZoneId)
}

override def convertToGpu(lhs: Expression, rhs: Expression): GpuExpression =
@@ -1785,9 +1785,6 @@ object GpuOverrides extends Logging {
ExprChecks.unaryProject(TypeSig.INT, TypeSig.INT,
TypeSig.TIMESTAMP, TypeSig.TIMESTAMP),
(hour, conf, p, r) => new UnaryExprMeta[Hour](hour, conf, p, r) {
override def tagExprForGpu(): Unit = {
checkTimeZoneId(hour.timeZoneId)
}

override def convertToGpu(expr: Expression): GpuExpression = GpuHour(expr)
}),
@@ -1796,9 +1793,6 @@ object GpuOverrides extends Logging {
ExprChecks.unaryProject(TypeSig.INT, TypeSig.INT,
TypeSig.TIMESTAMP, TypeSig.TIMESTAMP),
(minute, conf, p, r) => new UnaryExprMeta[Minute](minute, conf, p, r) {
override def tagExprForGpu(): Unit = {
checkTimeZoneId(minute.timeZoneId)
}

override def convertToGpu(expr: Expression): GpuExpression =
GpuMinute(expr)
@@ -1808,9 +1802,6 @@ object GpuOverrides extends Logging {
ExprChecks.unaryProject(TypeSig.INT, TypeSig.INT,
TypeSig.TIMESTAMP, TypeSig.TIMESTAMP),
(second, conf, p, r) => new UnaryExprMeta[Second](second, conf, p, r) {
override def tagExprForGpu(): Unit = {
checkTimeZoneId(second.timeZoneId)
}

override def convertToGpu(expr: Expression): GpuExpression =
GpuSecond(expr)
sql-plugin/src/main/scala/com/nvidia/spark/rapids/RapidsMeta.scala (21 changes: 15 additions & 6 deletions)
@@ -22,7 +22,7 @@ import scala.collection.mutable

import com.nvidia.spark.rapids.shims.SparkShimImpl

import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference, BinaryExpression, ComplexTypeMergingExpression, Expression, QuaternaryExpression, String2TrimExpression, TernaryExpression, UnaryExpression, WindowExpression, WindowFunction}
import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference, BinaryExpression, ComplexTypeMergingExpression, Expression, QuaternaryExpression, String2TrimExpression, TernaryExpression, TimeZoneAwareExpression, UnaryExpression, WindowExpression, WindowFunction}
import org.apache.spark.sql.catalyst.expressions.aggregate.{AggregateExpression, AggregateFunction, ImperativeAggregate, TypedImperativeAggregate}
import org.apache.spark.sql.catalyst.plans.physical.Partitioning
import org.apache.spark.sql.catalyst.trees.TreeNodeTag
@@ -367,11 +367,16 @@ abstract class RapidsMeta[INPUT <: BASE, BASE, OUTPUT <: BASE](
}
}

protected def checkTimeZoneId(timeZoneId: Option[String]): Unit = {
timeZoneId.foreach { zoneId =>
if (!TypeChecks.areTimestampsSupported(ZoneId.systemDefault())) {
willNotWorkOnGpu(s"Only UTC zone id is supported. Actual zone id: $zoneId")
}
def checkTimeZoneId(sessionZoneId: ZoneId): Unit = {
// Both of the Spark session time zone and JVM's default time zone should be UTC.
if (!TypeChecks.areTimestampsSupported(sessionZoneId)) {
willNotWorkOnGpu("Only UTC zone id is supported. " +
s"Actual session local zone id: $sessionZoneId")
}

val defaultZoneId = ZoneId.systemDefault()
if (!TypeChecks.areTimestampsSupported(defaultZoneId)) {
willNotWorkOnGpu(s"Only UTC zone id is supported. Actual default zone id: $defaultZoneId")
}
}

@@ -987,6 +992,10 @@ abstract class BaseExprMeta[INPUT <: Expression](
s"$wrapped is foldable and operates on non literals")
}
rule.getChecks.foreach(_.tag(this))
wrapped match {
case tzAware: TimeZoneAwareExpression => checkTimeZoneId(tzAware.zoneId)
case _ => // do nothing
}
tagExprForGpu()
}

sql-plugin/src/main/scala/com/nvidia/spark/rapids/TypeChecks.scala (19 changes: 14 additions & 5 deletions)
@@ -23,6 +23,8 @@ import ai.rapids.cudf.DType
import com.nvidia.spark.rapids.shims.{GpuTypeShims, TypeSigUtil}

import org.apache.spark.sql.catalyst.expressions.{Attribute, Expression, UnaryExpression, WindowSpecDefinition}
import org.apache.spark.sql.catalyst.util.DateTimeUtils
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types._

/** Trait of TypeSigUtil for different spark versions */
@@ -358,7 +360,8 @@ final class TypeSig private(
case DoubleType => check.contains(TypeEnum.DOUBLE)
case DateType => check.contains(TypeEnum.DATE)
case TimestampType if check.contains(TypeEnum.TIMESTAMP) =>
TypeChecks.areTimestampsSupported(ZoneId.systemDefault())
TypeChecks.areTimestampsSupported(ZoneId.systemDefault()) &&
TypeChecks.areTimestampsSupported(SQLConf.get.sessionLocalTimeZone)
case StringType => check.contains(TypeEnum.STRING)
case dt: DecimalType =>
check.contains(TypeEnum.DECIMAL) &&
@@ -419,10 +422,11 @@ final class TypeSig private(
basicNotSupportedMessage(dataType, TypeEnum.DATE, check, isChild)
case TimestampType =>
if (check.contains(TypeEnum.TIMESTAMP) &&
(!TypeChecks.areTimestampsSupported(ZoneId.systemDefault()))) {
Seq(withChild(isChild, s"$dataType is not supported when the JVM system " +
s"timezone is set to ${ZoneId.systemDefault()}. Set the timezone to UTC to enable " +
s"$dataType support"))
(!TypeChecks.areTimestampsSupported(ZoneId.systemDefault()) ||
!TypeChecks.areTimestampsSupported(SQLConf.get.sessionLocalTimeZone))) {
Seq(withChild(isChild, s"$dataType is not supported with timezone settings: (JVM:" +
s" ${ZoneId.systemDefault()}, session: ${SQLConf.get.sessionLocalTimeZone})." +
s" Set both of the timezones to UTC to enable $dataType support"))
} else {
basicNotSupportedMessage(dataType, TypeEnum.TIMESTAMP, check, isChild)
}
@@ -796,6 +800,11 @@ object TypeChecks {
def areTimestampsSupported(timezoneId: ZoneId): Boolean = {
timezoneId.normalized() == GpuOverrides.UTC_TIMEZONE_ID
}

def areTimestampsSupported(zoneIdString: String): Boolean = {
val zoneId = DateTimeUtils.getZoneId(zoneIdString)
areTimestampsSupported(zoneId)
}
}

/**
@@ -129,9 +129,7 @@ object GpuJsonScan {
}

if (types.contains(TimestampType)) {
if (!TypeChecks.areTimestampsSupported(parsedOptions.zoneId)) {
meta.willNotWorkOnGpu("Only UTC zone id is supported")
}
meta.checkTimeZoneId(parsedOptions.zoneId)
GpuTextBasedDateUtils.tagCudfFormat(meta,
GpuJsonUtils.timestampFormatInRead(parsedOptions), parseString = true)
}
@@ -377,8 +377,6 @@ abstract class UnixTimeExprMeta[A <: BinaryExpression with TimeZoneAwareExpression
var sparkFormat: String = _
var strfFormat: String = _
override def tagExprForGpu(): Unit = {
checkTimeZoneId(expr.timeZoneId)

// Date and Timestamp work too
if (expr.right.dataType == StringType) {
extractStringLit(expr.right) match {
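As a usage note, not part of this commit: after this change, a job that wants timestamp expressions to stay on the GPU has to make both settings resolve to UTC. A hedged sketch, assuming the standard spark.sql.session.timeZone config for the session zone and the JVM default zone being set up front (in a real deployment usually via -Duser.timezone=UTC on the driver and executors):

import java.util.TimeZone

import org.apache.spark.sql.SparkSession

object UtcSessionSketch {
  def main(args: Array[String]): Unit = {
    // JVM default time zone; set in code here for illustration only, normally
    // configured through -Duser.timezone=UTC rather than programmatically.
    TimeZone.setDefault(TimeZone.getTimeZone("UTC"))

    val spark = SparkSession.builder()
      .appName("utc-timestamp-sketch")
      // Spark session local time zone, read by the plugin via SQLConf.get.sessionLocalTimeZone.
      .config("spark.sql.session.timeZone", "UTC")
      .getOrCreate()

    // With both zones at UTC, the tightened timestamp checks above pass.
    spark.sql("SELECT current_timestamp() AS now").show()
  }
}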
