Skip to content

Commit

Permalink
add shim
Browse files Browse the repository at this point in the history
Signed-off-by: Allen Xu <allxu@nvidia.com>
  • Loading branch information
wjxiz1992 committed Jul 25, 2022
1 parent e041304 commit 8bbf530
Show file tree
Hide file tree
Showing 7 changed files with 48 additions and 170 deletions.
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
package org.apache.spark.sql.types.shims

import java.time.ZoneId

import org.apache.spark.sql.types.DataType

/**
 * Fallback shim for Spark versions that do not support casting partition
 * values to timestamp or ANSI interval types. On these versions no extra
 * partition value types are handled beyond what the caller covers itself.
 */
object PartitionValueCastShims {
  /** This shim supports no additional partition value types. */
  def isSupportedType(dt: DataType): Boolean = false

  /**
   * Always fails: the caller must only invoke this when `isSupportedType`
   * returned true, which never happens for this shim.
   */
  def castTo(desiredType: DataType, value: String, zoneId: ZoneId): Any =
    throw new IllegalArgumentException(s"Unexpected type $desiredType")
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
package org.apache.spark.sql.types.shims

import java.time.ZoneId

import scala.util.Try

import org.apache.spark.sql.catalyst.catalog.ExternalCatalogUtils.unescapePathName
import org.apache.spark.sql.catalyst.expressions.{Cast, Literal}
import org.apache.spark.sql.types.{AnsiIntervalType, AnyTimestampType, DataType, DateType}

/**
 * Shim for Spark versions that can cast partition values to timestamp
 * (TimestampType / TimestampNTZType) and ANSI interval types.
 */
object PartitionValueCastShims {
  /**
   * Returns true if `dt` is a partition value type this shim knows how
   * to cast: any timestamp type or an ANSI interval type.
   */
  def isSupportedType(dt: DataType): Boolean = dt match {
    // Timestamp types
    case dt if AnyTimestampType.acceptsType(dt) => true
    case _: AnsiIntervalType => true
    case _ => false
  }

  /**
   * Casts the raw partition string `value` to `desiredType`. Only valid for
   * types accepted by `isSupportedType`; any other type is rejected with an
   * IllegalArgumentException.
   */
  def castTo(desiredType: DataType, value: String, zoneId: ZoneId): Any = desiredType match {
    case dt if AnyTimestampType.acceptsType(dt) =>
      // Try to parse the unescaped value directly as a timestamp; fall back
      // to interpreting it as a date and widening it to the timestamp type.
      Try {
        Cast(Literal(unescapePathName(value)), dt, Some(zoneId.getId)).eval()
      }.getOrElse {
        Cast(Cast(Literal(value), DateType, Some(zoneId.getId)), dt).eval()
      }
    // Bug fix: isSupportedType accepts AnsiIntervalType, but this match
    // previously had no interval case, so interval partition values threw
    // an unhelpful scala.MatchError instead of being cast.
    case it: AnsiIntervalType =>
      Cast(Literal(unescapePathName(value)), it).eval()
    // Defensive default, consistent with the fallback shim's error.
    case _ =>
      throw new IllegalArgumentException(s"Unexpected type $desiredType")
  }
}

This file was deleted.

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -35,16 +35,16 @@ import org.apache.spark.sql.catalyst.catalog.ExternalCatalogUtils.{unescapePathN
import org.apache.spark.sql.catalyst.expressions.{Cast, Literal}
import org.apache.spark.sql.catalyst.util.{CaseInsensitiveMap, DateTimeUtils, TimestampFormatter}
import org.apache.spark.sql.catalyst.util.rapids.DateFormatter
import org.apache.spark.sql.errors.rapids.QueryExecutionErrors
import org.apache.spark.sql.execution.datasources.{PartitionPath, PartitionSpec}
import org.apache.spark.sql.execution.datasources.PartitioningAwareFileIndex.BASE_PATH_PARAM
import org.apache.spark.sql.execution.datasources.PartitioningUtils.timestampPartitionPattern
import org.apache.spark.sql.types._
import org.apache.spark.sql.types.rapids.{AnsiIntervalType, AnyTimestampType}
import org.apache.spark.sql.types.shims.PartitionValueCastShims
import org.apache.spark.unsafe.types.UTF8String




object GpuPartitioningUtils extends SQLConfHelper {

case class TypedPartValue(value: String, dataType: DataType)
Expand Down Expand Up @@ -181,18 +181,11 @@ object GpuPartitioningUtils extends SQLConfHelper {
case _: DecimalType => Literal(new JBigDecimal(value)).value
case DateType =>
Cast(Literal(value), DateType, Some(zoneId.getId)).eval()
// Timestamp types
case dt if AnyTimestampType.acceptsType(dt) =>
Try {
Cast(Literal(unescapePathName(value)), dt, Some(zoneId.getId)).eval()
}.getOrElse {
Cast(Cast(Literal(value), DateType, Some(zoneId.getId)), dt).eval()
}
case it: AnsiIntervalType =>
Cast(Literal(unescapePathName(value)), it).eval()
case BinaryType => value.getBytes()
case BooleanType => value.toBoolean
case dt => throw QueryExecutionErrors.typeUnsupportedError(dt)
case t if PartitionValueCastShims.isSupportedType(t) =>
PartitionValueCastShims.castTo(t, value, zoneId)
case dt => throw new IllegalArgumentException(s"Unexpected type $dt")
}

/**
Expand Down Expand Up @@ -321,8 +314,8 @@ object GpuPartitioningUtils extends SQLConfHelper {
} catch {
case NonFatal(_) =>
if (validatePartitionColumns) {
throw QueryExecutionErrors.failedToCastValueToDataTypeForPartitionColumnError(
typedValue.value, typedValue.dataType, columnName)
throw new RuntimeException(s"Failed to cast value `$typedValue.value` to " +
s"`$typedValue.dataType` for partition column `$columnName`")
} else null
}
}
Expand Down

This file was deleted.

This file was deleted.

0 comments on commit 8bbf530

Please sign in to comment.