diff --git a/sql-plugin/src/main/301db/scala/com/nvidia/spark/rapids/shims/v2/Spark30XdbShims.scala b/sql-plugin/src/main/301db/scala/com/nvidia/spark/rapids/shims/v2/Spark30XdbShims.scala index dbabd1b1228..f7e6ec82ec1 100644 --- a/sql-plugin/src/main/301db/scala/com/nvidia/spark/rapids/shims/v2/Spark30XdbShims.scala +++ b/sql-plugin/src/main/301db/scala/com/nvidia/spark/rapids/shims/v2/Spark30XdbShims.scala @@ -130,7 +130,7 @@ abstract class Spark30XdbShims extends Spark30XdbShimsBase with Logging { "Databricks-specific window function exec, for \"running\" windows, " + "i.e. (UNBOUNDED PRECEDING TO CURRENT ROW)", ExecChecks( - (TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL + + (TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128 + TypeSig.STRUCT + TypeSig.ARRAY + TypeSig.MAP).nested(), TypeSig.all, Map("partitionSpec" -> @@ -142,7 +142,7 @@ abstract class Spark30XdbShims extends Spark30XdbShimsBase with Logging { GpuOverrides.exec[FileSourceScanExec]( "Reading data from files, often from Hive tables", ExecChecks((TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.STRUCT + TypeSig.MAP + - TypeSig.ARRAY + TypeSig.DECIMAL_128_FULL).nested(), TypeSig.all), + TypeSig.ARRAY + TypeSig.DECIMAL_128).nested(), TypeSig.all), (fsse, conf, p, r) => new SparkPlanMeta[FileSourceScanExec](fsse, conf, p, r) { // Replaces SubqueryBroadcastExec inside dynamic pruning filters with GPU counterpart @@ -297,11 +297,11 @@ abstract class Spark30XdbShims extends Spark30XdbShimsBase with Logging { GpuOverrides.expr[Average]( "Average aggregate operator", ExprChecks.fullAgg( - TypeSig.DOUBLE + TypeSig.DECIMAL_128_FULL, - TypeSig.DOUBLE + TypeSig.DECIMAL_128_FULL, + TypeSig.DOUBLE + TypeSig.DECIMAL_128, + TypeSig.DOUBLE + TypeSig.DECIMAL_128, Seq(ParamCheck("input", - TypeSig.integral + TypeSig.fp + TypeSig.DECIMAL_128_FULL, - TypeSig.numeric))), + TypeSig.integral + TypeSig.fp + TypeSig.DECIMAL_128, + TypeSig.cpuNumeric))), (a, conf, p, r) => new AggExprMeta[Average](a, conf, p, r) { override def tagAggForGpu(): Unit = { // For Decimal Average the SUM adds a precision of 10 to avoid overflowing @@ -335,8 +335,8 @@ abstract class Spark30XdbShims extends Spark30XdbShimsBase with Logging { GpuOverrides.expr[Abs]( "Absolute value", ExprChecks.unaryProjectAndAstInputMatchesOutput( - TypeSig.implicitCastsAstTypes, TypeSig.gpuNumeric + TypeSig.DECIMAL_128_FULL, - TypeSig.numeric), + TypeSig.implicitCastsAstTypes, TypeSig.gpuNumeric, + TypeSig.cpuNumeric), (a, conf, p, r) => new UnaryAstExprMeta[Abs](a, conf, p, r) { // ANSI support for ABS was added in 3.2.0 SPARK-33275 override def convertToGpu(child: Expression): GpuExpression = GpuAbs(child, false) diff --git a/sql-plugin/src/main/301db/scala/com/nvidia/spark/rapids/shims/v2/Spark30XdbShimsBase.scala b/sql-plugin/src/main/301db/scala/com/nvidia/spark/rapids/shims/v2/Spark30XdbShimsBase.scala index ecc1203b466..51d568f0334 100644 --- a/sql-plugin/src/main/301db/scala/com/nvidia/spark/rapids/shims/v2/Spark30XdbShimsBase.scala +++ b/sql-plugin/src/main/301db/scala/com/nvidia/spark/rapids/shims/v2/Spark30XdbShimsBase.scala @@ -96,7 +96,7 @@ trait Spark30XdbShimsBase extends SparkShims { override def aqeShuffleReaderExec: ExecRule[_ <: SparkPlan] = exec[CustomShuffleReaderExec]( "A wrapper of shuffle query stage", - ExecChecks((TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL + TypeSig.ARRAY + + ExecChecks((TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128 + TypeSig.ARRAY + TypeSig.STRUCT + TypeSig.MAP).nested(), 
TypeSig.all), (exec, conf, p, r) => new GpuCustomShuffleReaderMeta(exec, conf, p, r)) diff --git a/sql-plugin/src/main/301until310-nondb/scala/com/nvidia/spark/rapids/shims/v2/Spark30XShims.scala b/sql-plugin/src/main/301until310-nondb/scala/com/nvidia/spark/rapids/shims/v2/Spark30XShims.scala index e78fd01e182..c695e4e61d1 100644 --- a/sql-plugin/src/main/301until310-nondb/scala/com/nvidia/spark/rapids/shims/v2/Spark30XShims.scala +++ b/sql-plugin/src/main/301until310-nondb/scala/com/nvidia/spark/rapids/shims/v2/Spark30XShims.scala @@ -121,7 +121,7 @@ abstract class Spark30XShims extends Spark301until320Shims with Logging { GpuOverrides.exec[FileSourceScanExec]( "Reading data from files, often from Hive tables", ExecChecks((TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.STRUCT + TypeSig.MAP + - TypeSig.ARRAY + TypeSig.DECIMAL_128_FULL).nested(), TypeSig.all), + TypeSig.ARRAY + TypeSig.DECIMAL_128).nested(), TypeSig.all), (fsse, conf, p, r) => new SparkPlanMeta[FileSourceScanExec](fsse, conf, p, r) { // Replaces SubqueryBroadcastExec inside dynamic pruning filters with GPU counterpart @@ -243,11 +243,11 @@ abstract class Spark30XShims extends Spark301until320Shims with Logging { GpuOverrides.expr[Average]( "Average aggregate operator", ExprChecks.fullAgg( - TypeSig.DOUBLE + TypeSig.DECIMAL_128_FULL, - TypeSig.DOUBLE + TypeSig.DECIMAL_128_FULL, + TypeSig.DOUBLE + TypeSig.DECIMAL_128, + TypeSig.DOUBLE + TypeSig.DECIMAL_128, Seq(ParamCheck("input", - TypeSig.integral + TypeSig.fp + TypeSig.DECIMAL_128_FULL, - TypeSig.numeric))), + TypeSig.integral + TypeSig.fp + TypeSig.DECIMAL_128, + TypeSig.cpuNumeric))), (a, conf, p, r) => new AggExprMeta[Average](a, conf, p, r) { override def tagAggForGpu(): Unit = { // For Decimal Average the SUM adds a precision of 10 to avoid overflowing @@ -281,8 +281,8 @@ abstract class Spark30XShims extends Spark301until320Shims with Logging { GpuOverrides.expr[Abs]( "Absolute value", ExprChecks.unaryProjectAndAstInputMatchesOutput( - TypeSig.implicitCastsAstTypes, TypeSig.gpuNumeric + TypeSig.DECIMAL_128_FULL, - TypeSig.numeric), + TypeSig.implicitCastsAstTypes, TypeSig.gpuNumeric, + TypeSig.cpuNumeric), (a, conf, p, r) => new UnaryAstExprMeta[Abs](a, conf, p, r) { // ANSI support for ABS was added in 3.2.0 SPARK-33275 override def convertToGpu(child: Expression): GpuExpression = GpuAbs(child, false) diff --git a/sql-plugin/src/main/301until320-all/scala/com/nvidia/spark/rapids/shims/v2/TypeSigUtil.scala b/sql-plugin/src/main/301until320-all/scala/com/nvidia/spark/rapids/shims/v2/TypeSigUtil.scala index e763d3283c3..baa4d60b756 100644 --- a/sql-plugin/src/main/301until320-all/scala/com/nvidia/spark/rapids/shims/v2/TypeSigUtil.scala +++ b/sql-plugin/src/main/301until320-all/scala/com/nvidia/spark/rapids/shims/v2/TypeSigUtil.scala @@ -65,5 +65,5 @@ object TypeSigUtil extends TypeSigUtilBase { /** Get numeric and interval TypeSig */ override def getNumericAndInterval(): TypeSig = - TypeSig.numeric + TypeSig.CALENDAR + TypeSig.cpuNumeric + TypeSig.CALENDAR } diff --git a/sql-plugin/src/main/301until320-nondb/scala/com/nvidia/spark/rapids/shims/v2/Spark301until320Shims.scala b/sql-plugin/src/main/301until320-nondb/scala/com/nvidia/spark/rapids/shims/v2/Spark301until320Shims.scala index 99ddc32143a..e8196b70a3e 100644 --- a/sql-plugin/src/main/301until320-nondb/scala/com/nvidia/spark/rapids/shims/v2/Spark301until320Shims.scala +++ b/sql-plugin/src/main/301until320-nondb/scala/com/nvidia/spark/rapids/shims/v2/Spark301until320Shims.scala @@ -118,7 +118,7 @@ trait 
Spark301until320Shims extends SparkShims { override def aqeShuffleReaderExec: ExecRule[_ <: SparkPlan] = exec[CustomShuffleReaderExec]( "A wrapper of shuffle query stage", - ExecChecks((TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL + TypeSig.ARRAY + + ExecChecks((TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128 + TypeSig.ARRAY + TypeSig.STRUCT + TypeSig.MAP).nested(), TypeSig.all), (exec, conf, p, r) => new GpuCustomShuffleReaderMeta(exec, conf, p, r)) diff --git a/sql-plugin/src/main/311until320-nondb/scala/com/nvidia/spark/rapids/shims/v2/Spark31XShims.scala b/sql-plugin/src/main/311until320-nondb/scala/com/nvidia/spark/rapids/shims/v2/Spark31XShims.scala index c85a7941f98..1685bbf94c8 100644 --- a/sql-plugin/src/main/311until320-nondb/scala/com/nvidia/spark/rapids/shims/v2/Spark31XShims.scala +++ b/sql-plugin/src/main/311until320-nondb/scala/com/nvidia/spark/rapids/shims/v2/Spark31XShims.scala @@ -113,15 +113,15 @@ abstract class Spark31XShims extends Spark301until320Shims with Logging { import TypeSig._ // nullChecks are the same - override val booleanChecks: TypeSig = integral + fp + BOOLEAN + STRING + DECIMAL_128_FULL - override val sparkBooleanSig: TypeSig = numeric + BOOLEAN + STRING + override val booleanChecks: TypeSig = integral + fp + BOOLEAN + STRING + DECIMAL_128 + override val sparkBooleanSig: TypeSig = cpuNumeric + BOOLEAN + STRING - override val integralChecks: TypeSig = gpuNumeric + BOOLEAN + STRING + DECIMAL_128_FULL - override val sparkIntegralSig: TypeSig = numeric + BOOLEAN + STRING + override val integralChecks: TypeSig = gpuNumeric + BOOLEAN + STRING + override val sparkIntegralSig: TypeSig = cpuNumeric + BOOLEAN + STRING - override val fpChecks: TypeSig = (gpuNumeric + BOOLEAN + STRING + DECIMAL_128_FULL) + override val fpChecks: TypeSig = (gpuNumeric + BOOLEAN + STRING) .withPsNote(TypeEnum.STRING, fpToStringPsNote) - override val sparkFpSig: TypeSig = numeric + BOOLEAN + STRING + override val sparkFpSig: TypeSig = cpuNumeric + BOOLEAN + STRING override val dateChecks: TypeSig = TIMESTAMP + DATE + STRING override val sparkDateSig: TypeSig = TIMESTAMP + DATE + STRING @@ -131,25 +131,25 @@ abstract class Spark31XShims extends Spark301until320Shims with Logging { // stringChecks are the same // binaryChecks are the same - override val decimalChecks: TypeSig = gpuNumeric + DECIMAL_128_FULL + STRING - override val sparkDecimalSig: TypeSig = numeric + BOOLEAN + STRING + override val decimalChecks: TypeSig = gpuNumeric + STRING + override val sparkDecimalSig: TypeSig = cpuNumeric + BOOLEAN + STRING // calendarChecks are the same override val arrayChecks: TypeSig = - ARRAY.nested(commonCudfTypes + DECIMAL_128_FULL + NULL + ARRAY + BINARY + STRUCT) + + ARRAY.nested(commonCudfTypes + DECIMAL_128 + NULL + ARRAY + BINARY + STRUCT) + psNote(TypeEnum.ARRAY, "The array's child type must also support being cast to " + "the desired child type") override val sparkArraySig: TypeSig = ARRAY.nested(all) override val mapChecks: TypeSig = - MAP.nested(commonCudfTypes + DECIMAL_128_FULL + NULL + ARRAY + BINARY + STRUCT + MAP) + + MAP.nested(commonCudfTypes + DECIMAL_128 + NULL + ARRAY + BINARY + STRUCT + MAP) + psNote(TypeEnum.MAP, "the map's key and value must also support being cast to the " + "desired child types") override val sparkMapSig: TypeSig = MAP.nested(all) override val structChecks: TypeSig = - STRUCT.nested(commonCudfTypes + DECIMAL_128_FULL + NULL + ARRAY + BINARY + STRUCT) + + STRUCT.nested(commonCudfTypes + DECIMAL_128 + NULL + ARRAY + 
BINARY + STRUCT) + psNote(TypeEnum.STRUCT, "the struct's children must also support being cast to the " + "desired child type(s)") override val sparkStructSig: TypeSig = STRUCT.nested(all) @@ -162,11 +162,11 @@ abstract class Spark31XShims extends Spark301until320Shims with Logging { GpuOverrides.expr[Average]( "Average aggregate operator", ExprChecks.fullAgg( - TypeSig.DOUBLE + TypeSig.DECIMAL_128_FULL, - TypeSig.DOUBLE + TypeSig.DECIMAL_128_FULL, + TypeSig.DOUBLE + TypeSig.DECIMAL_128, + TypeSig.DOUBLE + TypeSig.DECIMAL_128, Seq(ParamCheck("input", - TypeSig.integral + TypeSig.fp + TypeSig.DECIMAL_128_FULL, - TypeSig.numeric))), + TypeSig.integral + TypeSig.fp + TypeSig.DECIMAL_128, + TypeSig.cpuNumeric))), (a, conf, p, r) => new AggExprMeta[Average](a, conf, p, r) { override def tagAggForGpu(): Unit = { // For Decimal Average the SUM adds a precision of 10 to avoid overflowing @@ -200,8 +200,8 @@ abstract class Spark31XShims extends Spark301until320Shims with Logging { GpuOverrides.expr[Abs]( "Absolute value", ExprChecks.unaryProjectAndAstInputMatchesOutput( - TypeSig.implicitCastsAstTypes, TypeSig.gpuNumeric + TypeSig.DECIMAL_128_FULL, - TypeSig.numeric), + TypeSig.implicitCastsAstTypes, TypeSig.gpuNumeric, + TypeSig.cpuNumeric), (a, conf, p, r) => new UnaryAstExprMeta[Abs](a, conf, p, r) { // ANSI support for ABS was added in 3.2.0 SPARK-33275 override def convertToGpu(child: Expression): GpuExpression = GpuAbs(child, false) @@ -222,17 +222,17 @@ abstract class Spark31XShims extends Spark301until320Shims with Logging { GpuOverrides.expr[Lead]( "Window function that returns N entries ahead of this one", ExprChecks.windowOnly( - (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.NULL + + (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL + TypeSig.ARRAY + TypeSig.STRUCT).nested(), TypeSig.all, Seq( ParamCheck("input", - (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + + (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL + TypeSig.ARRAY + TypeSig.STRUCT).nested(), TypeSig.all), ParamCheck("offset", TypeSig.INT, TypeSig.INT), ParamCheck("default", - (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.NULL + + (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL + TypeSig.ARRAY + TypeSig.STRUCT).nested(), TypeSig.all) ) @@ -245,17 +245,17 @@ abstract class Spark31XShims extends Spark301until320Shims with Logging { GpuOverrides.expr[Lag]( "Window function that returns N entries behind this one", ExprChecks.windowOnly( - (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.NULL + + (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL + TypeSig.ARRAY + TypeSig.STRUCT).nested(), TypeSig.all, Seq( ParamCheck("input", - (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + + (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL + TypeSig.ARRAY + TypeSig.STRUCT).nested(), TypeSig.all), ParamCheck("offset", TypeSig.INT, TypeSig.INT), ParamCheck("default", - (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.NULL + + (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL + TypeSig.ARRAY + TypeSig.STRUCT).nested(), TypeSig.all) ) @@ -269,10 +269,10 @@ abstract class Spark31XShims extends Spark301until320Shims with Logging { "Gets the field at `ordinal` in the Array", ExprChecks.binaryProject( (TypeSig.commonCudfTypes + TypeSig.ARRAY + TypeSig.STRUCT + TypeSig.NULL + - TypeSig.DECIMAL_128_FULL + TypeSig.MAP).nested(), + TypeSig.DECIMAL_128 + TypeSig.MAP).nested(), TypeSig.all, ("array", 
TypeSig.ARRAY.nested(TypeSig.commonCudfTypes + TypeSig.ARRAY + - TypeSig.STRUCT + TypeSig.NULL + TypeSig.DECIMAL_128_FULL + TypeSig.MAP), + TypeSig.STRUCT + TypeSig.NULL + TypeSig.DECIMAL_128 + TypeSig.MAP), TypeSig.ARRAY.nested(TypeSig.all)), ("ordinal", TypeSig.lit(TypeEnum.INT), TypeSig.INT)), (in, conf, p, r) => new GpuGetArrayItemMeta(in, conf, p, r){ @@ -293,9 +293,9 @@ abstract class Spark31XShims extends Spark301until320Shims with Logging { "Returns value for the given key in value if column is map.", ExprChecks.binaryProject( (TypeSig.commonCudfTypes + TypeSig.ARRAY + TypeSig.STRUCT + TypeSig.NULL + - TypeSig.DECIMAL_128_FULL + TypeSig.MAP).nested(), TypeSig.all, + TypeSig.DECIMAL_128 + TypeSig.MAP).nested(), TypeSig.all, ("array/map", TypeSig.ARRAY.nested(TypeSig.commonCudfTypes + TypeSig.ARRAY + - TypeSig.STRUCT + TypeSig.NULL + TypeSig.DECIMAL_128_FULL + TypeSig.MAP) + + TypeSig.STRUCT + TypeSig.NULL + TypeSig.DECIMAL_128 + TypeSig.MAP) + TypeSig.MAP.nested(TypeSig.STRING) .withPsNote(TypeEnum.MAP ,"If it's map, only string is supported."), TypeSig.ARRAY.nested(TypeSig.all) + TypeSig.MAP.nested(TypeSig.all)), @@ -318,10 +318,10 @@ abstract class Spark31XShims extends Spark301until320Shims with Logging { // Match exactly with the checks for GetArrayItem ExprChecks.binaryProject( (TypeSig.commonCudfTypes + TypeSig.ARRAY + TypeSig.STRUCT + TypeSig.NULL + - TypeSig.DECIMAL_128_FULL + TypeSig.MAP).nested(), + TypeSig.DECIMAL_128 + TypeSig.MAP).nested(), TypeSig.all, ("array", TypeSig.ARRAY.nested(TypeSig.commonCudfTypes + TypeSig.ARRAY + - TypeSig.STRUCT + TypeSig.NULL + TypeSig.DECIMAL_128_FULL + TypeSig.MAP), + TypeSig.STRUCT + TypeSig.NULL + TypeSig.DECIMAL_128 + TypeSig.MAP), TypeSig.ARRAY.nested(TypeSig.all)), ("ordinal", TypeSig.lit(TypeEnum.INT), TypeSig.INT)) case _ => throw new IllegalStateException("Only Array or Map is supported as input.") @@ -361,7 +361,7 @@ abstract class Spark31XShims extends Spark301until320Shims with Logging { GpuOverrides.exec[FileSourceScanExec]( "Reading data from files, often from Hive tables", ExecChecks((TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.STRUCT + TypeSig.MAP + - TypeSig.ARRAY + TypeSig.DECIMAL_128_FULL).nested(), TypeSig.all), + TypeSig.ARRAY + TypeSig.DECIMAL_128).nested(), TypeSig.all), (fsse, conf, p, r) => new SparkPlanMeta[FileSourceScanExec](fsse, conf, p, r) { // Replaces SubqueryBroadcastExec inside dynamic pruning filters with GPU counterpart diff --git a/sql-plugin/src/main/31xdb/scala/com/nvidia/spark/rapids/shims/v2/Spark31XdbShims.scala b/sql-plugin/src/main/31xdb/scala/com/nvidia/spark/rapids/shims/v2/Spark31XdbShims.scala index b6fa8996c5c..31ed3f1f429 100644 --- a/sql-plugin/src/main/31xdb/scala/com/nvidia/spark/rapids/shims/v2/Spark31XdbShims.scala +++ b/sql-plugin/src/main/31xdb/scala/com/nvidia/spark/rapids/shims/v2/Spark31XdbShims.scala @@ -112,15 +112,15 @@ abstract class Spark31XdbShims extends Spark31XdbShimsBase with Logging { import TypeSig._ // nullChecks are the same - override val booleanChecks: TypeSig = integral + fp + BOOLEAN + STRING + DECIMAL_128_FULL - override val sparkBooleanSig: TypeSig = numeric + BOOLEAN + STRING + override val booleanChecks: TypeSig = integral + fp + BOOLEAN + STRING + DECIMAL_128 + override val sparkBooleanSig: TypeSig = cpuNumeric + BOOLEAN + STRING - override val integralChecks: TypeSig = gpuNumeric + BOOLEAN + STRING + DECIMAL_128_FULL - override val sparkIntegralSig: TypeSig = numeric + BOOLEAN + STRING + override val integralChecks: TypeSig = gpuNumeric + BOOLEAN + 
STRING + override val sparkIntegralSig: TypeSig = cpuNumeric + BOOLEAN + STRING - override val fpChecks: TypeSig = (gpuNumeric + BOOLEAN + STRING + DECIMAL_128_FULL) + override val fpChecks: TypeSig = (gpuNumeric + BOOLEAN + STRING) .withPsNote(TypeEnum.STRING, fpToStringPsNote) - override val sparkFpSig: TypeSig = numeric + BOOLEAN + STRING + override val sparkFpSig: TypeSig = cpuNumeric + BOOLEAN + STRING override val dateChecks: TypeSig = TIMESTAMP + DATE + STRING override val sparkDateSig: TypeSig = TIMESTAMP + DATE + STRING @@ -130,25 +130,25 @@ abstract class Spark31XdbShims extends Spark31XdbShimsBase with Logging { // stringChecks are the same // binaryChecks are the same - override val decimalChecks: TypeSig = gpuNumeric + DECIMAL_128_FULL + STRING - override val sparkDecimalSig: TypeSig = numeric + BOOLEAN + STRING + override val decimalChecks: TypeSig = gpuNumeric + STRING + override val sparkDecimalSig: TypeSig = cpuNumeric + BOOLEAN + STRING // calendarChecks are the same override val arrayChecks: TypeSig = - ARRAY.nested(commonCudfTypes + DECIMAL_128_FULL + NULL + ARRAY + BINARY + STRUCT) + + ARRAY.nested(commonCudfTypes + DECIMAL_128 + NULL + ARRAY + BINARY + STRUCT) + psNote(TypeEnum.ARRAY, "The array's child type must also support being cast to " + "the desired child type") override val sparkArraySig: TypeSig = ARRAY.nested(all) override val mapChecks: TypeSig = - MAP.nested(commonCudfTypes + DECIMAL_128_FULL + NULL + ARRAY + BINARY + STRUCT + MAP) + + MAP.nested(commonCudfTypes + DECIMAL_128 + NULL + ARRAY + BINARY + STRUCT + MAP) + psNote(TypeEnum.MAP, "the map's key and value must also support being cast to the " + "desired child types") override val sparkMapSig: TypeSig = MAP.nested(all) override val structChecks: TypeSig = - STRUCT.nested(commonCudfTypes + DECIMAL_128_FULL + NULL + ARRAY + BINARY + STRUCT) + + STRUCT.nested(commonCudfTypes + DECIMAL_128 + NULL + ARRAY + BINARY + STRUCT) + psNote(TypeEnum.STRUCT, "the struct's children must also support being cast to the " + "desired child type(s)") override val sparkStructSig: TypeSig = STRUCT.nested(all) @@ -161,11 +161,11 @@ abstract class Spark31XdbShims extends Spark31XdbShimsBase with Logging { GpuOverrides.expr[Average]( "Average aggregate operator", ExprChecks.fullAgg( - TypeSig.DOUBLE + TypeSig.DECIMAL_128_FULL, - TypeSig.DOUBLE + TypeSig.DECIMAL_128_FULL, + TypeSig.DOUBLE + TypeSig.DECIMAL_128, + TypeSig.DOUBLE + TypeSig.DECIMAL_128, Seq(ParamCheck("input", - TypeSig.integral + TypeSig.fp + TypeSig.DECIMAL_128_FULL, - TypeSig.numeric))), + TypeSig.integral + TypeSig.fp + TypeSig.DECIMAL_128, + TypeSig.cpuNumeric))), (a, conf, p, r) => new AggExprMeta[Average](a, conf, p, r) { override def tagAggForGpu(): Unit = { // For Decimal Average the SUM adds a precision of 10 to avoid overflowing @@ -199,8 +199,8 @@ abstract class Spark31XdbShims extends Spark31XdbShimsBase with Logging { GpuOverrides.expr[Abs]( "Absolute value", ExprChecks.unaryProjectAndAstInputMatchesOutput( - TypeSig.implicitCastsAstTypes, TypeSig.gpuNumeric + TypeSig.DECIMAL_128_FULL, - TypeSig.numeric), + TypeSig.implicitCastsAstTypes, TypeSig.gpuNumeric, + TypeSig.cpuNumeric), (a, conf, p, r) => new UnaryAstExprMeta[Abs](a, conf, p, r) { // ANSI support for ABS was added in 3.2.0 SPARK-33275 override def convertToGpu(child: Expression): GpuExpression = GpuAbs(child, false) @@ -221,17 +221,17 @@ abstract class Spark31XdbShims extends Spark31XdbShimsBase with Logging { GpuOverrides.expr[Lead]( "Window function that returns N entries ahead of this 
one", ExprChecks.windowOnly( - (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.NULL + + (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL + TypeSig.ARRAY + TypeSig.STRUCT).nested(), TypeSig.all, Seq( ParamCheck("input", - (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + + (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL + TypeSig.ARRAY + TypeSig.STRUCT).nested(), TypeSig.all), ParamCheck("offset", TypeSig.INT, TypeSig.INT), ParamCheck("default", - (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.NULL + + (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL + TypeSig.ARRAY + TypeSig.STRUCT).nested(), TypeSig.all) ) @@ -244,17 +244,17 @@ abstract class Spark31XdbShims extends Spark31XdbShimsBase with Logging { GpuOverrides.expr[Lag]( "Window function that returns N entries behind this one", ExprChecks.windowOnly( - (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.NULL + + (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL + TypeSig.ARRAY + TypeSig.STRUCT).nested(), TypeSig.all, Seq( ParamCheck("input", - (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + + (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL + TypeSig.ARRAY + TypeSig.STRUCT).nested(), TypeSig.all), ParamCheck("offset", TypeSig.INT, TypeSig.INT), ParamCheck("default", - (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.NULL + + (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL + TypeSig.ARRAY + TypeSig.STRUCT).nested(), TypeSig.all) ) @@ -268,10 +268,10 @@ abstract class Spark31XdbShims extends Spark31XdbShimsBase with Logging { "Gets the field at `ordinal` in the Array", ExprChecks.binaryProject( (TypeSig.commonCudfTypes + TypeSig.ARRAY + TypeSig.STRUCT + TypeSig.NULL + - TypeSig.DECIMAL_128_FULL + TypeSig.MAP).nested(), + TypeSig.DECIMAL_128 + TypeSig.MAP).nested(), TypeSig.all, ("array", TypeSig.ARRAY.nested(TypeSig.commonCudfTypes + TypeSig.ARRAY + - TypeSig.STRUCT + TypeSig.NULL + TypeSig.DECIMAL_128_FULL + TypeSig.MAP), + TypeSig.STRUCT + TypeSig.NULL + TypeSig.DECIMAL_128 + TypeSig.MAP), TypeSig.ARRAY.nested(TypeSig.all)), ("ordinal", TypeSig.lit(TypeEnum.INT), TypeSig.INT)), (in, conf, p, r) => new GpuGetArrayItemMeta(in, conf, p, r){ @@ -292,9 +292,9 @@ abstract class Spark31XdbShims extends Spark31XdbShimsBase with Logging { "Returns value for the given key in value if column is map.", ExprChecks.binaryProject( (TypeSig.commonCudfTypes + TypeSig.ARRAY + TypeSig.STRUCT + TypeSig.NULL + - TypeSig.DECIMAL_128_FULL + TypeSig.MAP).nested(), TypeSig.all, + TypeSig.DECIMAL_128 + TypeSig.MAP).nested(), TypeSig.all, ("array/map", TypeSig.ARRAY.nested(TypeSig.commonCudfTypes + TypeSig.ARRAY + - TypeSig.STRUCT + TypeSig.NULL + TypeSig.DECIMAL_128_FULL + TypeSig.MAP) + + TypeSig.STRUCT + TypeSig.NULL + TypeSig.DECIMAL_128 + TypeSig.MAP) + TypeSig.MAP.nested(TypeSig.STRING) .withPsNote(TypeEnum.MAP ,"If it's map, only string is supported."), TypeSig.ARRAY.nested(TypeSig.all) + TypeSig.MAP.nested(TypeSig.all)), @@ -317,10 +317,10 @@ abstract class Spark31XdbShims extends Spark31XdbShimsBase with Logging { // Match exactly with the checks for GetArrayItem ExprChecks.binaryProject( (TypeSig.commonCudfTypes + TypeSig.ARRAY + TypeSig.STRUCT + TypeSig.NULL + - TypeSig.DECIMAL_128_FULL + TypeSig.MAP).nested(), + TypeSig.DECIMAL_128 + TypeSig.MAP).nested(), TypeSig.all, ("array", TypeSig.ARRAY.nested(TypeSig.commonCudfTypes + TypeSig.ARRAY + - TypeSig.STRUCT + TypeSig.NULL + TypeSig.DECIMAL_128_FULL + 
TypeSig.MAP), + TypeSig.STRUCT + TypeSig.NULL + TypeSig.DECIMAL_128 + TypeSig.MAP), TypeSig.ARRAY.nested(TypeSig.all)), ("ordinal", TypeSig.lit(TypeEnum.INT), TypeSig.INT)) case _ => throw new IllegalStateException("Only Array or Map is supported as input.") @@ -361,7 +361,7 @@ abstract class Spark31XdbShims extends Spark31XdbShimsBase with Logging { "Databricks-specific window function exec, for \"running\" windows, " + "i.e. (UNBOUNDED PRECEDING TO CURRENT ROW)", ExecChecks( - (TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL + + (TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128 + TypeSig.STRUCT + TypeSig.ARRAY + TypeSig.MAP).nested(), TypeSig.all, Map("partitionSpec" -> @@ -373,7 +373,7 @@ abstract class Spark31XdbShims extends Spark31XdbShimsBase with Logging { GpuOverrides.exec[FileSourceScanExec]( "Reading data from files, often from Hive tables", ExecChecks((TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.STRUCT + TypeSig.MAP + - TypeSig.ARRAY + TypeSig.DECIMAL_128_FULL).nested(), TypeSig.all), + TypeSig.ARRAY + TypeSig.DECIMAL_128).nested(), TypeSig.all), (fsse, conf, p, r) => new SparkPlanMeta[FileSourceScanExec](fsse, conf, p, r) { // Replaces SubqueryBroadcastExec inside dynamic pruning filters with GPU counterpart diff --git a/sql-plugin/src/main/31xdb/scala/com/nvidia/spark/rapids/shims/v2/Spark31XdbShimsBase.scala b/sql-plugin/src/main/31xdb/scala/com/nvidia/spark/rapids/shims/v2/Spark31XdbShimsBase.scala index 0885e35b57d..d01fcb80296 100644 --- a/sql-plugin/src/main/31xdb/scala/com/nvidia/spark/rapids/shims/v2/Spark31XdbShimsBase.scala +++ b/sql-plugin/src/main/31xdb/scala/com/nvidia/spark/rapids/shims/v2/Spark31XdbShimsBase.scala @@ -96,7 +96,7 @@ trait Spark31XdbShimsBase extends SparkShims { override def aqeShuffleReaderExec: ExecRule[_ <: SparkPlan] = exec[CustomShuffleReaderExec]( "A wrapper of shuffle query stage", - ExecChecks((TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL + TypeSig.ARRAY + + ExecChecks((TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128 + TypeSig.ARRAY + TypeSig.STRUCT + TypeSig.MAP).nested(), TypeSig.all), (exec, conf, p, r) => new GpuCustomShuffleReaderMeta(exec, conf, p, r)) diff --git a/sql-plugin/src/main/320+/scala/com/nvidia/spark/rapids/shims/v2/Spark320PlusShims.scala b/sql-plugin/src/main/320+/scala/com/nvidia/spark/rapids/shims/v2/Spark320PlusShims.scala index 019bd40e1a1..56173588a3e 100644 --- a/sql-plugin/src/main/320+/scala/com/nvidia/spark/rapids/shims/v2/Spark320PlusShims.scala +++ b/sql-plugin/src/main/320+/scala/com/nvidia/spark/rapids/shims/v2/Spark320PlusShims.scala @@ -76,7 +76,7 @@ trait Spark320PlusShims extends SparkShims with RebaseShims with Logging { override final def aqeShuffleReaderExec: ExecRule[_ <: SparkPlan] = exec[AQEShuffleReadExec]( "A wrapper of shuffle query stage", - ExecChecks((TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL + TypeSig.ARRAY + + ExecChecks((TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128 + TypeSig.ARRAY + TypeSig.STRUCT + TypeSig.MAP).nested(), TypeSig.all), (exec, conf, p, r) => new GpuCustomShuffleReaderMeta(exec, conf, p, r)) @@ -200,14 +200,14 @@ trait Spark320PlusShims extends SparkShims with RebaseShims with Logging { // nullChecks are the same override val booleanChecks: TypeSig = integral + fp + BOOLEAN + STRING - override val sparkBooleanSig: TypeSig = numeric + BOOLEAN + STRING + override val sparkBooleanSig: TypeSig = cpuNumeric + BOOLEAN + STRING override val integralChecks: TypeSig = 
gpuNumeric + BOOLEAN + STRING - override val sparkIntegralSig: TypeSig = numeric + BOOLEAN + STRING + override val sparkIntegralSig: TypeSig = cpuNumeric + BOOLEAN + STRING override val fpChecks: TypeSig = (gpuNumeric + BOOLEAN + STRING) .withPsNote(TypeEnum.STRING, fpToStringPsNote) - override val sparkFpSig: TypeSig = numeric + BOOLEAN + STRING + override val sparkFpSig: TypeSig = cpuNumeric + BOOLEAN + STRING override val dateChecks: TypeSig = TIMESTAMP + DATE + STRING override val sparkDateSig: TypeSig = TIMESTAMP + DATE + STRING @@ -224,24 +224,24 @@ trait Spark320PlusShims extends SparkShims with RebaseShims with Logging { // binaryChecks are the same override val decimalChecks: TypeSig = gpuNumeric + STRING - override val sparkDecimalSig: TypeSig = numeric + BOOLEAN + STRING + override val sparkDecimalSig: TypeSig = cpuNumeric + BOOLEAN + STRING // calendarChecks are the same override val arrayChecks: TypeSig = - ARRAY.nested(commonCudfTypes + DECIMAL_128_FULL + NULL + ARRAY + BINARY + STRUCT) + + ARRAY.nested(commonCudfTypes + DECIMAL_128 + NULL + ARRAY + BINARY + STRUCT) + psNote(TypeEnum.ARRAY, "The array's child type must also support being cast to " + "the desired child type") override val sparkArraySig: TypeSig = ARRAY.nested(all) override val mapChecks: TypeSig = - MAP.nested(commonCudfTypes + DECIMAL_128_FULL + NULL + ARRAY + BINARY + STRUCT + MAP) + + MAP.nested(commonCudfTypes + DECIMAL_128 + NULL + ARRAY + BINARY + STRUCT + MAP) + psNote(TypeEnum.MAP, "the map's key and value must also support being cast to the " + "desired child types") override val sparkMapSig: TypeSig = MAP.nested(all) override val structChecks: TypeSig = - STRUCT.nested(commonCudfTypes + DECIMAL_128_FULL + NULL + ARRAY + BINARY + STRUCT) + + STRUCT.nested(commonCudfTypes + DECIMAL_128 + NULL + ARRAY + BINARY + STRUCT) + psNote(TypeEnum.STRUCT, "the struct's children must also support being cast to the " + "desired child type(s)") override val sparkStructSig: TypeSig = STRUCT.nested(all) @@ -254,12 +254,12 @@ trait Spark320PlusShims extends SparkShims with RebaseShims with Logging { GpuOverrides.expr[Average]( "Average aggregate operator", ExprChecks.fullAgg( - TypeSig.DOUBLE + TypeSig.DECIMAL_128_FULL, - TypeSig.DOUBLE + TypeSig.DECIMAL_128_FULL, + TypeSig.DOUBLE + TypeSig.DECIMAL_128, + TypeSig.DOUBLE + TypeSig.DECIMAL_128, // NullType is not technically allowed by Spark, but in practice in 3.2.0 // it can show up Seq(ParamCheck("input", - TypeSig.integral + TypeSig.fp + TypeSig.DECIMAL_128_FULL + TypeSig.NULL, + TypeSig.integral + TypeSig.fp + TypeSig.DECIMAL_128 + TypeSig.NULL, TypeSig.numericAndInterval + TypeSig.NULL))), (a, conf, p, r) => new AggExprMeta[Average](a, conf, p, r) { override def tagAggForGpu(): Unit = { @@ -294,8 +294,8 @@ trait Spark320PlusShims extends SparkShims with RebaseShims with Logging { GpuOverrides.expr[Abs]( "Absolute value", ExprChecks.unaryProjectAndAstInputMatchesOutput( - TypeSig.implicitCastsAstTypes, TypeSig.gpuNumeric + TypeSig.DECIMAL_128_FULL, - TypeSig.numeric), + TypeSig.implicitCastsAstTypes, TypeSig.gpuNumeric, + TypeSig.cpuNumeric), (a, conf, p, r) => new UnaryAstExprMeta[Abs](a, conf, p, r) { val ansiEnabled = SQLConf.get.ansiEnabled @@ -322,17 +322,17 @@ trait Spark320PlusShims extends SparkShims with RebaseShims with Logging { GpuOverrides.expr[Lead]( "Window function that returns N entries ahead of this one", ExprChecks.windowOnly( - (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.NULL + + (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + 
TypeSig.NULL + TypeSig.ARRAY + TypeSig.STRUCT).nested(), TypeSig.all, Seq( ParamCheck("input", - (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + + (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL + TypeSig.ARRAY + TypeSig.STRUCT).nested(), TypeSig.all), ParamCheck("offset", TypeSig.INT, TypeSig.INT), ParamCheck("default", - (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.NULL + + (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL + TypeSig.ARRAY + TypeSig.STRUCT).nested(), TypeSig.all) ) @@ -345,17 +345,17 @@ trait Spark320PlusShims extends SparkShims with RebaseShims with Logging { GpuOverrides.expr[Lag]( "Window function that returns N entries behind this one", ExprChecks.windowOnly( - (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.NULL + + (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL + TypeSig.ARRAY + TypeSig.STRUCT).nested(), TypeSig.all, Seq( ParamCheck("input", - (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + + (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL + TypeSig.ARRAY + TypeSig.STRUCT).nested(), TypeSig.all), ParamCheck("offset", TypeSig.INT, TypeSig.INT), ParamCheck("default", - (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.NULL + + (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL + TypeSig.ARRAY + TypeSig.STRUCT).nested(), TypeSig.all) ) @@ -369,10 +369,10 @@ trait Spark320PlusShims extends SparkShims with RebaseShims with Logging { "Gets the field at `ordinal` in the Array", ExprChecks.binaryProject( (TypeSig.commonCudfTypes + TypeSig.ARRAY + TypeSig.STRUCT + TypeSig.NULL + - TypeSig.DECIMAL_128_FULL + TypeSig.MAP).nested(), + TypeSig.DECIMAL_128 + TypeSig.MAP).nested(), TypeSig.all, ("array", TypeSig.ARRAY.nested(TypeSig.commonCudfTypes + TypeSig.ARRAY + - TypeSig.STRUCT + TypeSig.NULL + TypeSig.DECIMAL_128_FULL + TypeSig.MAP), + TypeSig.STRUCT + TypeSig.NULL + TypeSig.DECIMAL_128 + TypeSig.MAP), TypeSig.ARRAY.nested(TypeSig.all)), ("ordinal", TypeSig.lit(TypeEnum.INT), TypeSig.INT)), (in, conf, p, r) => new GpuGetArrayItemMeta(in, conf, p, r) { @@ -393,9 +393,9 @@ trait Spark320PlusShims extends SparkShims with RebaseShims with Logging { "Returns value for the given key in value if column is map.", ExprChecks.binaryProject( (TypeSig.commonCudfTypes + TypeSig.ARRAY + TypeSig.STRUCT + TypeSig.NULL + - TypeSig.DECIMAL_128_FULL + TypeSig.MAP).nested(), TypeSig.all, + TypeSig.DECIMAL_128 + TypeSig.MAP).nested(), TypeSig.all, ("array/map", TypeSig.ARRAY.nested(TypeSig.commonCudfTypes + TypeSig.ARRAY + - TypeSig.STRUCT + TypeSig.NULL + TypeSig.DECIMAL_128_FULL + TypeSig.MAP) + + TypeSig.STRUCT + TypeSig.NULL + TypeSig.DECIMAL_128 + TypeSig.MAP) + TypeSig.MAP.nested(TypeSig.STRING) .withPsNote(TypeEnum.MAP, "If it's map, only string is supported."), TypeSig.ARRAY.nested(TypeSig.all) + TypeSig.MAP.nested(TypeSig.all)), @@ -418,10 +418,10 @@ trait Spark320PlusShims extends SparkShims with RebaseShims with Logging { // Match exactly with the checks for GetArrayItem ExprChecks.binaryProject( (TypeSig.commonCudfTypes + TypeSig.ARRAY + TypeSig.STRUCT + TypeSig.NULL + - TypeSig.DECIMAL_128_FULL + TypeSig.MAP).nested(), + TypeSig.DECIMAL_128 + TypeSig.MAP).nested(), TypeSig.all, ("array", TypeSig.ARRAY.nested(TypeSig.commonCudfTypes + TypeSig.ARRAY + - TypeSig.STRUCT + TypeSig.NULL + TypeSig.DECIMAL_128_FULL + TypeSig.MAP), + TypeSig.STRUCT + TypeSig.NULL + TypeSig.DECIMAL_128 + TypeSig.MAP), TypeSig.ARRAY.nested(TypeSig.all)), ("ordinal", 
TypeSig.lit(TypeEnum.INT), TypeSig.INT)) case _ => throw new IllegalStateException("Only Array or Map is supported as input.") @@ -437,9 +437,9 @@ trait Spark320PlusShims extends SparkShims with RebaseShims with Logging { "Holds a static value from the query", ExprChecks.projectAndAst( TypeSig.astTypes, - (TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL + TypeSig.CALENDAR + (TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128 + TypeSig.CALENDAR + TypeSig.ARRAY + TypeSig.MAP + TypeSig.STRUCT + TypeSig.DAYTIME) - .nested(TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL + + .nested(TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128 + TypeSig.ARRAY + TypeSig.MAP + TypeSig.STRUCT), TypeSig.all), (lit, conf, p, r) => new LiteralExprMeta(lit, conf, p, r)), @@ -485,11 +485,11 @@ trait Spark320PlusShims extends SparkShims with RebaseShims with Logging { "Calculates a return value for every input row of a table based on a group (or " + "\"window\") of rows", ExprChecks.windowOnly( - (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.NULL + + (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL + TypeSig.ARRAY + TypeSig.STRUCT + TypeSig.MAP).nested(), TypeSig.all, Seq(ParamCheck("windowFunction", - (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.NULL + + (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL + TypeSig.ARRAY + TypeSig.STRUCT + TypeSig.MAP).nested(), TypeSig.all), ParamCheck("windowSpec", @@ -525,7 +525,7 @@ trait Spark320PlusShims extends SparkShims with RebaseShims with Logging { GpuOverrides.exec[FileSourceScanExec]( "Reading data from files, often from Hive tables", ExecChecks((TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.STRUCT + TypeSig.MAP + - TypeSig.ARRAY + TypeSig.DECIMAL_128_FULL).nested(), TypeSig.all), + TypeSig.ARRAY + TypeSig.DECIMAL_128).nested(), TypeSig.all), (fsse, conf, p, r) => new SparkPlanMeta[FileSourceScanExec](fsse, conf, p, r) { // Replaces SubqueryBroadcastExec inside dynamic pruning filters with GPU counterpart @@ -638,7 +638,7 @@ trait Spark320PlusShims extends SparkShims with RebaseShims with Logging { "The backend for most file input", ExecChecks( (TypeSig.commonCudfTypes + TypeSig.STRUCT + TypeSig.MAP + TypeSig.ARRAY + - TypeSig.DECIMAL_128_FULL).nested(), + TypeSig.DECIMAL_128).nested(), TypeSig.all), (p, conf, parent, r) => new SparkPlanMeta[BatchScanExec](p, conf, parent, r) { override val childScans: scala.Seq[ScanMeta[_]] = diff --git a/sql-plugin/src/main/320+/scala/com/nvidia/spark/rapids/shims/v2/TypeSigUtil.scala b/sql-plugin/src/main/320+/scala/com/nvidia/spark/rapids/shims/v2/TypeSigUtil.scala index 5c484c7d40b..77b4f12fbf4 100644 --- a/sql-plugin/src/main/320+/scala/com/nvidia/spark/rapids/shims/v2/TypeSigUtil.scala +++ b/sql-plugin/src/main/320+/scala/com/nvidia/spark/rapids/shims/v2/TypeSigUtil.scala @@ -60,5 +60,5 @@ object TypeSigUtil extends TypeSigUtilBase { /** Get numeric and interval TypeSig */ override def getNumericAndInterval(): TypeSig = - TypeSig.numeric + TypeSig.CALENDAR + TypeSig.DAYTIME + TypeSig.YEARMONTH + TypeSig.cpuNumeric + TypeSig.CALENDAR + TypeSig.DAYTIME + TypeSig.YEARMONTH } diff --git a/sql-plugin/src/main/scala/com/nvidia/spark/rapids/GpuOverrides.scala b/sql-plugin/src/main/scala/com/nvidia/spark/rapids/GpuOverrides.scala index ea0a87c8153..951c8105d48 100644 --- a/sql-plugin/src/main/scala/com/nvidia/spark/rapids/GpuOverrides.scala +++ b/sql-plugin/src/main/scala/com/nvidia/spark/rapids/GpuOverrides.scala @@ 
-821,21 +821,21 @@ object GpuOverrides extends Logging { (CsvFormatType, FileFormatChecks( cudfRead = TypeSig.commonCudfTypes, cudfWrite = TypeSig.none, - sparkSig = TypeSig.atomics)), + sparkSig = TypeSig.cpuAtomics)), (ParquetFormatType, FileFormatChecks( - cudfRead = (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.STRUCT + + cudfRead = (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.STRUCT + TypeSig.ARRAY + TypeSig.MAP).nested(), - cudfWrite = (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.STRUCT + + cudfWrite = (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.STRUCT + TypeSig.ARRAY + TypeSig.MAP).nested(), - sparkSig = (TypeSig.atomics + TypeSig.STRUCT + TypeSig.ARRAY + TypeSig.MAP + + sparkSig = (TypeSig.cpuAtomics + TypeSig.STRUCT + TypeSig.ARRAY + TypeSig.MAP + TypeSig.UDT).nested())), (OrcFormatType, FileFormatChecks( - cudfRead = (TypeSig.commonCudfTypes + TypeSig.ARRAY + TypeSig.DECIMAL_128_FULL + + cudfRead = (TypeSig.commonCudfTypes + TypeSig.ARRAY + TypeSig.DECIMAL_128 + TypeSig.STRUCT + TypeSig.MAP).nested(), cudfWrite = (TypeSig.commonCudfTypes + TypeSig.ARRAY + // Note Map is not put into nested, now CUDF only support single level map - TypeSig.STRUCT + TypeSig.DECIMAL_128_FULL).nested() + TypeSig.MAP, - sparkSig = (TypeSig.atomics + TypeSig.STRUCT + TypeSig.ARRAY + TypeSig.MAP + + TypeSig.STRUCT + TypeSig.DECIMAL_128).nested() + TypeSig.MAP, + sparkSig = (TypeSig.cpuAtomics + TypeSig.STRUCT + TypeSig.ARRAY + TypeSig.MAP + TypeSig.UDT).nested()))) val commonExpressions: Map[Class[_ <: Expression], ExprRule[_ <: Expression]] = Seq( @@ -843,9 +843,9 @@ object GpuOverrides extends Logging { "Holds a static value from the query", ExprChecks.projectAndAst( TypeSig.astTypes, - (TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL + TypeSig.CALENDAR + (TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128 + TypeSig.CALENDAR + TypeSig.ARRAY + TypeSig.MAP + TypeSig.STRUCT) - .nested(TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL + + .nested(TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128 + TypeSig.ARRAY + TypeSig.MAP + TypeSig.STRUCT), TypeSig.all), (lit, conf, p, r) => new LiteralExprMeta(lit, conf, p, r)), @@ -860,7 +860,7 @@ object GpuOverrides extends Logging { ExprChecks.unaryProjectAndAstInputMatchesOutput( TypeSig.astTypes, (TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.MAP + TypeSig.ARRAY + TypeSig.STRUCT - + TypeSig.DECIMAL_128_FULL).nested(), + + TypeSig.DECIMAL_128).nested(), TypeSig.all), (a, conf, p, r) => new UnaryAstExprMeta[Alias](a, conf, p, r) { override def convertToGpu(child: Expression): GpuExpression = @@ -871,7 +871,7 @@ object GpuOverrides extends Logging { ExprChecks.projectAndAst( TypeSig.astTypes, (TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.MAP + TypeSig.ARRAY + - TypeSig.STRUCT + TypeSig.DECIMAL_128_FULL).nested(), + TypeSig.STRUCT + TypeSig.DECIMAL_128).nested(), TypeSig.all), (att, conf, p, r) => new BaseExprMeta[AttributeReference](att, conf, p, r) { // This is the only NOOP operator. 
It goes away when things are bound @@ -887,15 +887,15 @@ object GpuOverrides extends Logging { }), expr[PromotePrecision]( "PromotePrecision before arithmetic operations between DecimalType data", - ExprChecks.unaryProjectInputMatchesOutput(TypeSig.DECIMAL_128_FULL, - TypeSig.DECIMAL_128_FULL), + ExprChecks.unaryProjectInputMatchesOutput(TypeSig.DECIMAL_128, + TypeSig.DECIMAL_128), (a, conf, p, r) => new UnaryExprMeta[PromotePrecision](a, conf, p, r) { override def convertToGpu(child: Expression): GpuExpression = GpuPromotePrecision(child) }), expr[CheckOverflow]( "CheckOverflow after arithmetic operations between DecimalType data", - ExprChecks.unaryProjectInputMatchesOutput(TypeSig.DECIMAL_128_FULL, - TypeSig.DECIMAL_128_FULL), + ExprChecks.unaryProjectInputMatchesOutput(TypeSig.DECIMAL_128, + TypeSig.DECIMAL_128), (a, conf, p, r) => new ExprMeta[CheckOverflow](a, conf, p, r) { private[this] def extractOrigParam(expr: BaseExprMeta[_]): BaseExprMeta[_] = expr.wrapped match { @@ -1041,11 +1041,11 @@ object GpuOverrides extends Logging { "Calculates a return value for every input row of a table based on a group (or " + "\"window\") of rows", ExprChecks.windowOnly( - (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.NULL + + (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL + TypeSig.ARRAY + TypeSig.STRUCT + TypeSig.MAP).nested(), TypeSig.all, Seq(ParamCheck("windowFunction", - (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.NULL + + (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL + TypeSig.ARRAY + TypeSig.STRUCT + TypeSig.MAP).nested(), TypeSig.all), ParamCheck("windowSpec", @@ -1102,7 +1102,7 @@ object GpuOverrides extends Logging { ExprChecks.windowOnly(TypeSig.INT, TypeSig.INT, repeatingParamCheck = Some(RepeatingParamCheck("ordering", - TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.NULL, + TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL, TypeSig.all))), (rank, conf, p, r) => new ExprMeta[Rank](rank, conf, p, r) { override def convertToGpu(): GpuExpression = GpuRank(childExprs.map(_.convertToGpu())) @@ -1112,7 +1112,7 @@ object GpuOverrides extends Logging { ExprChecks.windowOnly(TypeSig.INT, TypeSig.INT, repeatingParamCheck = Some(RepeatingParamCheck("ordering", - TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.NULL, + TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL, TypeSig.all))), (denseRank, conf, p, r) => new ExprMeta[DenseRank](denseRank, conf, p, r) { override def convertToGpu(): GpuExpression = GpuDenseRank(childExprs.map(_.convertToGpu())) @@ -1120,17 +1120,17 @@ object GpuOverrides extends Logging { expr[Lead]( "Window function that returns N entries ahead of this one", ExprChecks.windowOnly( - (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.NULL + + (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL + TypeSig.ARRAY + TypeSig.STRUCT).nested(), TypeSig.all, Seq( ParamCheck("input", - (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + + (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL + TypeSig.ARRAY + TypeSig.STRUCT).nested(), TypeSig.all), ParamCheck("offset", TypeSig.INT, TypeSig.INT), ParamCheck("default", - (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.NULL + + (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL + TypeSig.ARRAY + TypeSig.STRUCT).nested(), TypeSig.all) ) @@ -1142,17 +1142,17 @@ object GpuOverrides extends Logging { expr[Lag]( "Window function that returns N entries behind this one", 
ExprChecks.windowOnly( - (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.NULL + + (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL + TypeSig.ARRAY + TypeSig.STRUCT).nested(), TypeSig.all, Seq( ParamCheck("input", - (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + + (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL + TypeSig.ARRAY + TypeSig.STRUCT).nested(), TypeSig.all), ParamCheck("offset", TypeSig.INT, TypeSig.INT), ParamCheck("default", - (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.NULL + + (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL + TypeSig.ARRAY + TypeSig.STRUCT).nested(), TypeSig.all) ) @@ -1177,7 +1177,7 @@ object GpuOverrides extends Logging { "Negate a numeric value", ExprChecks.unaryProjectAndAstInputMatchesOutput( TypeSig.implicitCastsAstTypes, - TypeSig.gpuNumeric + TypeSig.DECIMAL_128_FULL, + TypeSig.gpuNumeric, TypeSig.numericAndInterval), (a, conf, p, r) => new UnaryAstExprMeta[UnaryMinus](a, conf, p, r) { val ansiEnabled = SQLConf.get.ansiEnabled @@ -1195,7 +1195,7 @@ object GpuOverrides extends Logging { "A numeric value with a + in front of it", ExprChecks.unaryProjectAndAstInputMatchesOutput( TypeSig.astTypes, - TypeSig.gpuNumeric + TypeSig.DECIMAL_128_FULL, + TypeSig.gpuNumeric, TypeSig.numericAndInterval), (a, conf, p, r) => new UnaryAstExprMeta[UnaryPositive](a, conf, p, r) { override def convertToGpu(child: Expression): GpuExpression = GpuUnaryPositive(child) @@ -1288,8 +1288,8 @@ object GpuOverrides extends Logging { expr[Floor]( "Floor of a number", ExprChecks.unaryProjectInputMatchesOutput( - TypeSig.DOUBLE + TypeSig.LONG + TypeSig.DECIMAL_128_FULL, - TypeSig.DOUBLE + TypeSig.LONG + TypeSig.DECIMAL_128_FULL), + TypeSig.DOUBLE + TypeSig.LONG + TypeSig.DECIMAL_128, + TypeSig.DOUBLE + TypeSig.LONG + TypeSig.DECIMAL_128), (a, conf, p, r) => new UnaryExprMeta[Floor](a, conf, p, r) { override def tagExprForGpu(): Unit = { a.dataType match { @@ -1308,8 +1308,8 @@ object GpuOverrides extends Logging { expr[Ceil]( "Ceiling of a number", ExprChecks.unaryProjectInputMatchesOutput( - TypeSig.DOUBLE + TypeSig.LONG + TypeSig.DECIMAL_128_FULL, - TypeSig.DOUBLE + TypeSig.LONG + TypeSig.DECIMAL_128_FULL), + TypeSig.DOUBLE + TypeSig.LONG + TypeSig.DECIMAL_128, + TypeSig.DOUBLE + TypeSig.LONG + TypeSig.DECIMAL_128), (a, conf, p, r) => new UnaryExprMeta[Ceil](a, conf, p, r) { override def tagExprForGpu(): Unit = { a.dataType match { @@ -1336,7 +1336,7 @@ object GpuOverrides extends Logging { "Checks if a value is null", ExprChecks.unaryProject(TypeSig.BOOLEAN, TypeSig.BOOLEAN, (TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.MAP + TypeSig.ARRAY + - TypeSig.STRUCT + TypeSig.DECIMAL_128_FULL).nested(), + TypeSig.STRUCT + TypeSig.DECIMAL_128).nested(), TypeSig.all), (a, conf, p, r) => new UnaryExprMeta[IsNull](a, conf, p, r) { override def convertToGpu(child: Expression): GpuExpression = GpuIsNull(child) @@ -1345,7 +1345,7 @@ object GpuOverrides extends Logging { "Checks if a value is not null", ExprChecks.unaryProject(TypeSig.BOOLEAN, TypeSig.BOOLEAN, (TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.MAP + TypeSig.ARRAY + - TypeSig.STRUCT + TypeSig.DECIMAL_128_FULL).nested(), + TypeSig.STRUCT + TypeSig.DECIMAL_128).nested(), TypeSig.all), (a, conf, p, r) => new UnaryExprMeta[IsNotNull](a, conf, p, r) { override def convertToGpu(child: Expression): GpuExpression = GpuIsNotNull(child) @@ -1374,7 +1374,7 @@ object GpuOverrides extends Logging { "Checks if number of non null/Nan values is greater than a 
given value", ExprChecks.projectOnly(TypeSig.BOOLEAN, TypeSig.BOOLEAN, repeatingParamCheck = Some(RepeatingParamCheck("input", - (TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL + TypeSig.MAP + + (TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128 + TypeSig.MAP + TypeSig.ARRAY + TypeSig.STRUCT).nested(), TypeSig.all))), (a, conf, p, r) => new ExprMeta[AtLeastNNonNulls](a, conf, p, r) { @@ -1472,10 +1472,10 @@ object GpuOverrides extends Logging { expr[Coalesce] ( "Returns the first non-null argument if exists. Otherwise, null", ExprChecks.projectOnly( - (_gpuCommonTypes + TypeSig.DECIMAL_128_FULL + TypeSig.ARRAY + TypeSig.STRUCT).nested(), + (_gpuCommonTypes + TypeSig.DECIMAL_128 + TypeSig.ARRAY + TypeSig.STRUCT).nested(), TypeSig.all, repeatingParamCheck = Some(RepeatingParamCheck("param", - (_gpuCommonTypes + TypeSig.DECIMAL_128_FULL + TypeSig.ARRAY + TypeSig.STRUCT).nested(), + (_gpuCommonTypes + TypeSig.DECIMAL_128 + TypeSig.ARRAY + TypeSig.STRUCT).nested(), TypeSig.all))), (a, conf, p, r) => new ExprMeta[Coalesce](a, conf, p, r) { override def convertToGpu(): GpuExpression = GpuCoalesce(childExprs.map(_.convertToGpu())) @@ -1483,9 +1483,9 @@ object GpuOverrides extends Logging { expr[Least] ( "Returns the least value of all parameters, skipping null values", ExprChecks.projectOnly( - TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL, TypeSig.orderable, + TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128, TypeSig.orderable, repeatingParamCheck = Some(RepeatingParamCheck("param", - TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL, + TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128, TypeSig.orderable))), (a, conf, p, r) => new ExprMeta[Least](a, conf, p, r) { override def convertToGpu(): GpuExpression = GpuLeast(childExprs.map(_.convertToGpu())) @@ -1493,9 +1493,9 @@ object GpuOverrides extends Logging { expr[Greatest] ( "Returns the greatest value of all parameters, skipping null values", ExprChecks.projectOnly( - TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL, TypeSig.orderable, + TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128, TypeSig.orderable, repeatingParamCheck = Some(RepeatingParamCheck("param", - TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL, + TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128, TypeSig.orderable))), (a, conf, p, r) => new ExprMeta[Greatest](a, conf, p, r) { override def convertToGpu(): GpuExpression = GpuGreatest(childExprs.map(_.convertToGpu())) @@ -1634,7 +1634,7 @@ object GpuOverrides extends Logging { expr[KnownNotNull]( "Tag an expression as known to not be null", ExprChecks.unaryProjectInputMatchesOutput( - (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.BINARY + TypeSig.CALENDAR + + (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.BINARY + TypeSig.CALENDAR + TypeSig.ARRAY + TypeSig.MAP + TypeSig.STRUCT).nested(), TypeSig.all), (k, conf, p, r) => new UnaryExprMeta[KnownNotNull](k, conf, p, r) { override def convertToGpu(child: Expression): GpuExpression = @@ -1825,9 +1825,9 @@ object GpuOverrides extends Logging { }), expr[Pmod]( "Pmod", - ExprChecks.binaryProject(TypeSig.integral + TypeSig.fp, TypeSig.numeric, - ("lhs", TypeSig.integral + TypeSig.fp, TypeSig.numeric), - ("rhs", TypeSig.integral + TypeSig.fp, TypeSig.numeric)), + ExprChecks.binaryProject(TypeSig.integral + TypeSig.fp, TypeSig.cpuNumeric, + ("lhs", TypeSig.integral + TypeSig.fp, TypeSig.cpuNumeric), + ("rhs", 
TypeSig.integral + TypeSig.fp, TypeSig.cpuNumeric)), (a, conf, p, r) => new BinaryExprMeta[Pmod](a, conf, p, r) { override def convertToGpu(lhs: Expression, rhs: Expression): GpuExpression = GpuPmod(lhs, rhs) @@ -1836,10 +1836,10 @@ object GpuOverrides extends Logging { "Addition", ExprChecks.binaryProjectAndAst( TypeSig.implicitCastsAstTypes, - TypeSig.gpuNumeric + TypeSig.DECIMAL_128_FULL, TypeSig.numericAndInterval, - ("lhs", TypeSig.gpuNumeric + TypeSig.DECIMAL_128_FULL, + TypeSig.gpuNumeric, TypeSig.numericAndInterval, + ("lhs", TypeSig.gpuNumeric, TypeSig.numericAndInterval), - ("rhs", TypeSig.gpuNumeric + TypeSig.DECIMAL_128_FULL, + ("rhs", TypeSig.gpuNumeric, TypeSig.numericAndInterval)), (a, conf, p, r) => new BinaryAstExprMeta[Add](a, conf, p, r) { private val ansiEnabled = SQLConf.get.ansiEnabled @@ -1857,10 +1857,10 @@ object GpuOverrides extends Logging { "Subtraction", ExprChecks.binaryProjectAndAst( TypeSig.implicitCastsAstTypes, - TypeSig.gpuNumeric + TypeSig.DECIMAL_128_FULL, TypeSig.numericAndInterval, - ("lhs", TypeSig.gpuNumeric + TypeSig.DECIMAL_128_FULL, + TypeSig.gpuNumeric, TypeSig.numericAndInterval, + ("lhs", TypeSig.gpuNumeric, TypeSig.numericAndInterval), - ("rhs", TypeSig.gpuNumeric + TypeSig.DECIMAL_128_FULL, + ("rhs", TypeSig.gpuNumeric, TypeSig.numericAndInterval)), (a, conf, p, r) => new BinaryAstExprMeta[Subtract](a, conf, p, r) { private val ansiEnabled = SQLConf.get.ansiEnabled @@ -1878,12 +1878,12 @@ object GpuOverrides extends Logging { "Multiplication", ExprChecks.binaryProjectAndAst( TypeSig.implicitCastsAstTypes, - TypeSig.gpuNumeric + TypeSig.DECIMAL_128_FULL + TypeSig.psNote(TypeEnum.DECIMAL, + TypeSig.gpuNumeric + TypeSig.psNote(TypeEnum.DECIMAL, "Because of Spark's inner workings the full range of decimal precision " + "(even for 128-bit values) is not supported."), - TypeSig.numeric, - ("lhs", TypeSig.gpuNumeric + TypeSig.DECIMAL_128_FULL, TypeSig.numeric), - ("rhs", TypeSig.gpuNumeric + TypeSig.DECIMAL_128_FULL, TypeSig.numeric)), + TypeSig.cpuNumeric, + ("lhs", TypeSig.gpuNumeric, TypeSig.cpuNumeric), + ("rhs", TypeSig.gpuNumeric, TypeSig.cpuNumeric)), (a, conf, p, r) => new BinaryAstExprMeta[Multiply](a, conf, p, r) { override def tagExprForGpu(): Unit = { if (SQLConf.get.ansiEnabled && GpuAnsi.needBasicOpOverflowCheck(a.dataType)) { @@ -1922,9 +1922,9 @@ object GpuOverrides extends Logging { "Check if the values are equal including nulls <=>", ExprChecks.binaryProject( TypeSig.BOOLEAN, TypeSig.BOOLEAN, - ("lhs", TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL, + ("lhs", TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128, TypeSig.comparable), - ("rhs", TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL, + ("rhs", TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128, TypeSig.comparable)), (a, conf, p, r) => new BinaryExprMeta[EqualNullSafe](a, conf, p, r) { override def convertToGpu(lhs: Expression, rhs: Expression): GpuExpression = @@ -1935,9 +1935,9 @@ object GpuOverrides extends Logging { ExprChecks.binaryProjectAndAst( TypeSig.comparisonAstTypes, TypeSig.BOOLEAN, TypeSig.BOOLEAN, - ("lhs", TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL, + ("lhs", TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128, TypeSig.comparable), - ("rhs", TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL, + ("rhs", TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128, TypeSig.comparable)), (a, conf, p, r) => new BinaryAstExprMeta[EqualTo](a, conf, p, r) { override 
@@ -1935,9 +1935,9 @@ object GpuOverrides extends Logging {
       ExprChecks.binaryProjectAndAst(
         TypeSig.comparisonAstTypes,
         TypeSig.BOOLEAN, TypeSig.BOOLEAN,
-        ("lhs", TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL,
+        ("lhs", TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128,
          TypeSig.comparable),
-        ("rhs", TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL,
+        ("rhs", TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128,
          TypeSig.comparable)),
       (a, conf, p, r) => new BinaryAstExprMeta[EqualTo](a, conf, p, r) {
         override def convertToGpu(lhs: Expression, rhs: Expression): GpuExpression =
@@ -1948,9 +1948,9 @@ object GpuOverrides extends Logging {
       ExprChecks.binaryProjectAndAst(
         TypeSig.comparisonAstTypes,
         TypeSig.BOOLEAN, TypeSig.BOOLEAN,
-        ("lhs", TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL,
+        ("lhs", TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128,
          TypeSig.orderable),
-        ("rhs", TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL,
+        ("rhs", TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128,
          TypeSig.orderable)),
       (a, conf, p, r) => new BinaryAstExprMeta[GreaterThan](a, conf, p, r) {
         override def convertToGpu(lhs: Expression, rhs: Expression): GpuExpression =
@@ -1961,9 +1961,9 @@ object GpuOverrides extends Logging {
       ExprChecks.binaryProjectAndAst(
         TypeSig.comparisonAstTypes,
         TypeSig.BOOLEAN, TypeSig.BOOLEAN,
-        ("lhs", TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL,
+        ("lhs", TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128,
          TypeSig.orderable),
-        ("rhs", TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL,
+        ("rhs", TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128,
          TypeSig.orderable)),
       (a, conf, p, r) => new BinaryAstExprMeta[GreaterThanOrEqual](a, conf, p, r) {
         override def convertToGpu(lhs: Expression, rhs: Expression): GpuExpression =
@@ -1972,10 +1972,10 @@ object GpuOverrides extends Logging {
     expr[In](
       "IN operator",
       ExprChecks.projectOnly(TypeSig.BOOLEAN, TypeSig.BOOLEAN,
-        Seq(ParamCheck("value", TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL,
+        Seq(ParamCheck("value", TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128,
          TypeSig.comparable)),
        Some(RepeatingParamCheck("list",
-          (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL).withAllLit(),
+          (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128).withAllLit(),
          TypeSig.comparable))),
       (in, conf, p, r) => new ExprMeta[In](in, conf, p, r) {
         override def tagExprForGpu(): Unit = {
@@ -1994,7 +1994,7 @@ object GpuOverrides extends Logging {
     expr[InSet](
       "INSET operator",
       ExprChecks.unaryProject(TypeSig.BOOLEAN, TypeSig.BOOLEAN,
-        TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL, TypeSig.comparable),
+        TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128, TypeSig.comparable),
       (in, conf, p, r) => new ExprMeta[InSet](in, conf, p, r) {
         override def tagExprForGpu(): Unit = {
           if (in.hset.contains(null)) {
@@ -2009,9 +2009,9 @@ object GpuOverrides extends Logging {
       ExprChecks.binaryProjectAndAst(
         TypeSig.comparisonAstTypes,
         TypeSig.BOOLEAN, TypeSig.BOOLEAN,
-        ("lhs", TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL,
+        ("lhs", TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128,
          TypeSig.orderable),
-        ("rhs", TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL,
+        ("rhs", TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128,
          TypeSig.orderable)),
       (a, conf, p, r) => new BinaryAstExprMeta[LessThan](a, conf, p, r) {
         override def convertToGpu(lhs: Expression, rhs: Expression): GpuExpression =
@@ -2022,9 +2022,9 @@ object GpuOverrides extends Logging {
       ExprChecks.binaryProjectAndAst(
         TypeSig.comparisonAstTypes,
         TypeSig.BOOLEAN, TypeSig.BOOLEAN,
-        ("lhs", TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL,
+        ("lhs", TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128,
          TypeSig.orderable),
-        ("rhs", TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL,
+        ("rhs", TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128,
          TypeSig.orderable)),
       (a, conf, p, r) => new BinaryAstExprMeta[LessThanOrEqual](a, conf, p, r) {
         override def convertToGpu(lhs: Expression, rhs: Expression): GpuExpression =
@@ -2050,16 +2050,16 @@ object GpuOverrides extends Logging {
     expr[If](
       "IF expression",
       ExprChecks.projectOnly(
-        (_gpuCommonTypes + TypeSig.DECIMAL_128_FULL + TypeSig.ARRAY + TypeSig.STRUCT +
+        (_gpuCommonTypes + TypeSig.DECIMAL_128 + TypeSig.ARRAY + TypeSig.STRUCT +
          TypeSig.MAP).nested(),
        TypeSig.all,
        Seq(ParamCheck("predicate", TypeSig.BOOLEAN, TypeSig.BOOLEAN),
          ParamCheck("trueValue",
-            (_gpuCommonTypes + TypeSig.DECIMAL_128_FULL + TypeSig.ARRAY + TypeSig.STRUCT +
+            (_gpuCommonTypes + TypeSig.DECIMAL_128 + TypeSig.ARRAY + TypeSig.STRUCT +
              TypeSig.MAP).nested(),
            TypeSig.all),
          ParamCheck("falseValue",
-            (_gpuCommonTypes + TypeSig.DECIMAL_128_FULL + TypeSig.ARRAY + TypeSig.STRUCT +
+            (_gpuCommonTypes + TypeSig.DECIMAL_128 + TypeSig.ARRAY + TypeSig.STRUCT +
              TypeSig.MAP).nested(),
            TypeSig.all))),
       (a, conf, p, r) => new ExprMeta[If](a, conf, p, r) {
@@ -2081,15 +2081,15 @@ object GpuOverrides extends Logging {
     expr[Divide](
       "Division",
       ExprChecks.binaryProject(
-        TypeSig.DOUBLE + TypeSig.DECIMAL_128_FULL +
+        TypeSig.DOUBLE + TypeSig.DECIMAL_128 +
          TypeSig.psNote(TypeEnum.DECIMAL,
            "Because of Spark's inner workings the full range of decimal precision " +
            "(even for 128-bit values) is not supported."),
-        TypeSig.DOUBLE + TypeSig.DECIMAL_128_FULL,
-        ("lhs", TypeSig.DOUBLE + TypeSig.DECIMAL_128_FULL,
-          TypeSig.DOUBLE + TypeSig.DECIMAL_128_FULL),
-        ("rhs", TypeSig.DOUBLE + TypeSig.DECIMAL_128_FULL,
-          TypeSig.DOUBLE + TypeSig.DECIMAL_128_FULL)),
+        TypeSig.DOUBLE + TypeSig.DECIMAL_128,
+        ("lhs", TypeSig.DOUBLE + TypeSig.DECIMAL_128,
+          TypeSig.DOUBLE + TypeSig.DECIMAL_128),
+        ("rhs", TypeSig.DOUBLE + TypeSig.DECIMAL_128,
+          TypeSig.DOUBLE + TypeSig.DECIMAL_128)),
       (a, conf, p, r) => new BinaryExprMeta[Divide](a, conf, p, r) {
         // Division of Decimal types is a little odd. To work around some issues with
         // what Spark does the tagging/checks are in CheckOverflow instead of here.
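Both Multiply above and Divide carry the psNote about the full range of decimal precision. The root cause is Spark's result-type widening: it first computes an "ideal" precision/scale, then clamps it to 38 digits while trading away scale, and that lossy adjustment is what the plugin cannot always reproduce bit-for-bit. A sketch of the ideal rules (these follow Spark's standard decimal arithmetic rules; the helper names are invented for illustration):

```scala
// Illustrative sketch of Spark's decimal widening rules (not plugin code).
object DecimalWidening extends App {
  // "Ideal" result (precision, scale) before Spark clamps precision to 38:
  def idealMultiply(p1: Int, s1: Int, p2: Int, s2: Int): (Int, Int) =
    (p1 + p2 + 1, s1 + s2)

  def idealDivide(p1: Int, s1: Int, p2: Int, s2: Int): (Int, Int) = {
    val scale = math.max(6, s1 + p2 + 1)
    (p1 - s1 + s2 + scale, scale)
  }

  // DECIMAL(38, 10) * DECIMAL(38, 10) "wants" 77 digits and the division
  // "wants" 87; both blow past the 38-digit cap, so Spark rounds and drops
  // scale, which a GPU implementation cannot always match at the extremes.
  println(idealMultiply(38, 10, 38, 10)) // (77, 20)
  println(idealDivide(38, 10, 38, 10))   // (87, 49)
}
```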
@@ -2106,8 +2106,8 @@ object GpuOverrides extends Logging {
       "Division with an integer result",
       ExprChecks.binaryProject(
         TypeSig.LONG, TypeSig.LONG,
-        ("lhs", TypeSig.LONG + TypeSig.DECIMAL_128_FULL, TypeSig.LONG + TypeSig.DECIMAL_128_FULL),
-        ("rhs", TypeSig.LONG + TypeSig.DECIMAL_128_FULL, TypeSig.LONG + TypeSig.DECIMAL_128_FULL)),
+        ("lhs", TypeSig.LONG + TypeSig.DECIMAL_128, TypeSig.LONG + TypeSig.DECIMAL_128),
+        ("rhs", TypeSig.LONG + TypeSig.DECIMAL_128, TypeSig.LONG + TypeSig.DECIMAL_128)),
       (a, conf, p, r) => new BinaryExprMeta[IntegralDivide](a, conf, p, r) {
         override def convertToGpu(lhs: Expression, rhs: Expression): GpuExpression =
           GpuIntegralDivide(lhs, rhs)
@@ -2115,9 +2115,9 @@ object GpuOverrides extends Logging {
     expr[Remainder](
       "Remainder or modulo",
       ExprChecks.binaryProject(
-        TypeSig.integral + TypeSig.fp, TypeSig.numeric,
-        ("lhs", TypeSig.integral + TypeSig.fp, TypeSig.numeric),
-        ("rhs", TypeSig.integral + TypeSig.fp, TypeSig.numeric)),
+        TypeSig.integral + TypeSig.fp, TypeSig.cpuNumeric,
+        ("lhs", TypeSig.integral + TypeSig.fp, TypeSig.cpuNumeric),
+        ("rhs", TypeSig.integral + TypeSig.fp, TypeSig.cpuNumeric)),
       (a, conf, p, r) => new BinaryExprMeta[Remainder](a, conf, p, r) {
         override def convertToGpu(lhs: Expression, rhs: Expression): GpuExpression =
           GpuRemainder(lhs, rhs)
@@ -2125,12 +2125,12 @@ object GpuOverrides extends Logging {
     expr[AggregateExpression](
       "Aggregate expression",
       ExprChecks.fullAgg(
-        (TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL +
+        (TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128 +
          TypeSig.STRUCT + TypeSig.ARRAY + TypeSig.MAP).nested(),
        TypeSig.all,
        Seq(ParamCheck(
          "aggFunc",
-          (TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL +
+          (TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128 +
            TypeSig.STRUCT + TypeSig.ARRAY + TypeSig.MAP).nested(),
          TypeSig.all)),
        Some(RepeatingParamCheck("filter", TypeSig.BOOLEAN, TypeSig.BOOLEAN))),
@@ -2160,11 +2160,11 @@ object GpuOverrides extends Logging {
     expr[SortOrder](
       "Sort order",
       ExprChecks.projectOnly(
-        (pluginSupportedOrderableSig + TypeSig.DECIMAL_128_FULL + TypeSig.STRUCT).nested(),
+        (pluginSupportedOrderableSig + TypeSig.DECIMAL_128 + TypeSig.STRUCT).nested(),
        TypeSig.orderable,
        Seq(ParamCheck(
          "input",
-          (pluginSupportedOrderableSig + TypeSig.DECIMAL_128_FULL + TypeSig.STRUCT).nested(),
+          (pluginSupportedOrderableSig + TypeSig.DECIMAL_128 + TypeSig.STRUCT).nested(),
          TypeSig.orderable))),
       (sortOrder, conf, p, r) => new BaseExprMeta[SortOrder](sortOrder, conf, p, r) {
         override def tagExprForGpu(): Unit = {
@@ -2220,8 +2220,8 @@ object GpuOverrides extends Logging {
       ExprChecks.fullAgg(
         TypeSig.LONG, TypeSig.LONG,
         repeatingParamCheck = Some(RepeatingParamCheck(
-          "input", _gpuCommonTypes + TypeSig.DECIMAL_128_FULL +
-            TypeSig.STRUCT.nested(_gpuCommonTypes + TypeSig.DECIMAL_128_FULL),
+          "input", _gpuCommonTypes + TypeSig.DECIMAL_128 +
+            TypeSig.STRUCT.nested(_gpuCommonTypes + TypeSig.DECIMAL_128),
          TypeSig.all))),
       (count, conf, p, r) => new AggExprMeta[Count](count, conf, p, r) {
         override def tagAggForGpu(): Unit = {
@@ -2235,9 +2235,9 @@ object GpuOverrides extends Logging {
     expr[Max](
       "Max aggregate operator",
       ExprChecks.fullAgg(
-        TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.NULL, TypeSig.orderable,
+        TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL, TypeSig.orderable,
        Seq(ParamCheck("input",
-          (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.NULL)
+          (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL)
            .withPsNote(TypeEnum.DOUBLE, nanAggPsNote)
            .withPsNote(TypeEnum.FLOAT, nanAggPsNote),
          TypeSig.orderable))
@@ -2257,9 +2257,9 @@ object GpuOverrides extends Logging {
     expr[Min](
       "Min aggregate operator",
       ExprChecks.fullAgg(
-        TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.NULL, TypeSig.orderable,
+        TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL, TypeSig.orderable,
        Seq(ParamCheck("input",
-          (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.NULL)
+          (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL)
            .withPsNote(TypeEnum.DOUBLE, nanAggPsNote)
            .withPsNote(TypeEnum.FLOAT, nanAggPsNote),
          TypeSig.orderable))
@@ -2279,9 +2279,9 @@ object GpuOverrides extends Logging {
     expr[Sum](
       "Sum aggregate operator",
       ExprChecks.fullAgg(
-        TypeSig.LONG + TypeSig.DOUBLE + TypeSig.DECIMAL_128_FULL,
-        TypeSig.LONG + TypeSig.DOUBLE + TypeSig.DECIMAL_128_FULL,
-        Seq(ParamCheck("input", TypeSig.gpuNumeric + TypeSig.DECIMAL_128_FULL, TypeSig.numeric))),
+        TypeSig.LONG + TypeSig.DOUBLE + TypeSig.DECIMAL_128,
+        TypeSig.LONG + TypeSig.DOUBLE + TypeSig.DECIMAL_128,
+        Seq(ParamCheck("input", TypeSig.gpuNumeric, TypeSig.cpuNumeric))),
       (a, conf, p, r) => new AggExprMeta[Sum](a, conf, p, r) {
         override def tagAggForGpu(): Unit = {
           val inputDataType = a.child.dataType
@@ -2295,11 +2295,11 @@ object GpuOverrides extends Logging {
       "first aggregate operator", {
         ExprChecks.aggNotWindow(
           (TypeSig.STRUCT + TypeSig.ARRAY + TypeSig.MAP +
-            TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL).nested(),
+            TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128).nested(),
          TypeSig.all,
          Seq(ParamCheck("input",
            (TypeSig.STRUCT + TypeSig.ARRAY + TypeSig.MAP +
-              TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL).nested(),
+              TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128).nested(),
            TypeSig.all))
        )
      },
@@ -2314,11 +2314,11 @@ object GpuOverrides extends Logging {
       "last aggregate operator", {
         ExprChecks.aggNotWindow(
           (TypeSig.STRUCT + TypeSig.ARRAY + TypeSig.MAP +
-            TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL).nested(),
+            TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128).nested(),
          TypeSig.all,
          Seq(ParamCheck("input",
            (TypeSig.STRUCT + TypeSig.ARRAY + TypeSig.MAP +
-              TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL).nested(),
+              TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128).nested(),
            TypeSig.all))
        )
      },
@@ -2332,11 +2332,11 @@ object GpuOverrides extends Logging {
     expr[BRound](
       "Round an expression to d decimal places using HALF_EVEN rounding mode",
       ExprChecks.binaryProject(
-        TypeSig.gpuNumeric + TypeSig.DECIMAL_128_FULL, TypeSig.numeric,
-        ("value", TypeSig.gpuNumeric + TypeSig.DECIMAL_128_FULL +
+        TypeSig.gpuNumeric, TypeSig.cpuNumeric,
+        ("value", TypeSig.gpuNumeric +
          TypeSig.psNote(TypeEnum.FLOAT, "result may round slightly differently") +
          TypeSig.psNote(TypeEnum.DOUBLE, "result may round slightly differently"),
-          TypeSig.numeric),
+          TypeSig.cpuNumeric),
        ("scale", TypeSig.lit(TypeEnum.INT), TypeSig.lit(TypeEnum.INT))),
       (a, conf, p, r) => new BinaryExprMeta[BRound](a, conf, p, r) {
         override def tagExprForGpu(): Unit = {
@@ -2353,11 +2353,11 @@ object GpuOverrides extends Logging {
     expr[Round](
       "Round an expression to d decimal places using HALF_UP rounding mode",
       ExprChecks.binaryProject(
-        TypeSig.gpuNumeric + TypeSig.DECIMAL_128_FULL, TypeSig.numeric,
-        ("value", TypeSig.gpuNumeric + TypeSig.DECIMAL_128_FULL +
+        TypeSig.gpuNumeric, TypeSig.cpuNumeric,
+        ("value", TypeSig.gpuNumeric +
          TypeSig.psNote(TypeEnum.FLOAT, "result may round slightly differently") +
          TypeSig.psNote(TypeEnum.DOUBLE, "result may round slightly differently"),
-          TypeSig.numeric),
+          TypeSig.cpuNumeric),
        ("scale", TypeSig.lit(TypeEnum.INT), TypeSig.lit(TypeEnum.INT))),
       (a, conf, p, r) => new BinaryExprMeta[Round](a, conf, p, r) {
         override def tagExprForGpu(): Unit = {
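BRound and Round above flag FLOAT and DOUBLE with "result may round slightly differently". That is inherent to binary floating point rather than anything GPU-specific: most decimal fractions have no exact binary representation, so a HALF_UP or HALF_EVEN decision can flip on the hidden digits. A plain-Scala illustration, no Spark required:

```scala
import java.math.{BigDecimal => JBigDecimal, RoundingMode}

object RoundingDemo extends App {
  // 2.675 has no exact double representation; the double actually holds
  // 2.67499999999999982236431605997495353221893310546875.
  val exact      = new JBigDecimal("2.675") // true decimal value
  val fromDouble = new JBigDecimal(2.675)   // the value as seen through a double

  println(exact.setScale(2, RoundingMode.HALF_UP))      // 2.68
  println(fromDouble.setScale(2, RoundingMode.HALF_UP)) // 2.67
}
```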
@@ -2514,10 +2514,10 @@ object GpuOverrides extends Logging {
       "Gets the named field of the struct",
       ExprChecks.unaryProject(
         (TypeSig.commonCudfTypes + TypeSig.ARRAY + TypeSig.STRUCT + TypeSig.MAP + TypeSig.NULL +
-          TypeSig.DECIMAL_128_FULL).nested(),
+          TypeSig.DECIMAL_128).nested(),
        TypeSig.all,
        TypeSig.STRUCT.nested(TypeSig.commonCudfTypes + TypeSig.ARRAY +
-          TypeSig.STRUCT + TypeSig.MAP + TypeSig.NULL + TypeSig.DECIMAL_128_FULL),
+          TypeSig.STRUCT + TypeSig.MAP + TypeSig.NULL + TypeSig.DECIMAL_128),
        TypeSig.STRUCT.nested(TypeSig.all)),
       (expr, conf, p, r) => new UnaryExprMeta[GetStructField](expr, conf, p, r) {
         override def convertToGpu(arr: Expression): GpuExpression =
@@ -2527,10 +2527,10 @@ object GpuOverrides extends Logging {
       "Gets the field at `ordinal` in the Array",
       ExprChecks.binaryProject(
         (TypeSig.commonCudfTypes + TypeSig.ARRAY + TypeSig.STRUCT + TypeSig.NULL +
-          TypeSig.DECIMAL_128_FULL + TypeSig.MAP).nested(),
+          TypeSig.DECIMAL_128 + TypeSig.MAP).nested(),
        TypeSig.all,
        ("array", TypeSig.ARRAY.nested(TypeSig.commonCudfTypes + TypeSig.ARRAY +
-          TypeSig.STRUCT + TypeSig.NULL + TypeSig.DECIMAL_128_FULL + TypeSig.MAP),
+          TypeSig.STRUCT + TypeSig.NULL + TypeSig.DECIMAL_128 + TypeSig.MAP),
          TypeSig.ARRAY.nested(TypeSig.all)),
        ("ordinal", TypeSig.lit(TypeEnum.INT), TypeSig.INT)),
       (in, conf, p, r) => new GpuGetArrayItemMeta(in, conf, p, r)),
@@ -2545,9 +2545,9 @@ object GpuOverrides extends Logging {
       "Returns value for the given key in value if column is map",
       ExprChecks.binaryProject(
         (TypeSig.commonCudfTypes + TypeSig.ARRAY + TypeSig.STRUCT + TypeSig.NULL +
-          TypeSig.DECIMAL_128_FULL + TypeSig.MAP).nested(), TypeSig.all,
+          TypeSig.DECIMAL_128 + TypeSig.MAP).nested(), TypeSig.all,
        ("array/map", TypeSig.ARRAY.nested(TypeSig.commonCudfTypes + TypeSig.ARRAY +
-          TypeSig.STRUCT + TypeSig.NULL + TypeSig.DECIMAL_128_FULL + TypeSig.MAP) +
+          TypeSig.STRUCT + TypeSig.NULL + TypeSig.DECIMAL_128 + TypeSig.MAP) +
          TypeSig.MAP.nested(TypeSig.STRING)
            .withPsNote(TypeEnum.MAP ,"If it's map, only string is supported."),
          TypeSig.ARRAY.nested(TypeSig.all) + TypeSig.MAP.nested(TypeSig.all)),
@@ -2570,10 +2570,10 @@ object GpuOverrides extends Logging {
         // Match exactly with the checks for GetArrayItem
         ExprChecks.binaryProject(
           (TypeSig.commonCudfTypes + TypeSig.ARRAY + TypeSig.STRUCT + TypeSig.NULL +
-            TypeSig.DECIMAL_128_FULL + TypeSig.MAP).nested(),
+            TypeSig.DECIMAL_128 + TypeSig.MAP).nested(),
          TypeSig.all,
          ("array", TypeSig.ARRAY.nested(TypeSig.commonCudfTypes + TypeSig.ARRAY +
-            TypeSig.STRUCT + TypeSig.NULL + TypeSig.DECIMAL_128_FULL + TypeSig.MAP),
+            TypeSig.STRUCT + TypeSig.NULL + TypeSig.DECIMAL_128 + TypeSig.MAP),
            TypeSig.ARRAY.nested(TypeSig.all)),
          ("ordinal", TypeSig.lit(TypeEnum.INT), TypeSig.INT))
       case _ => throw new IllegalStateException("Only Array or Map is supported as input.")
@@ -2589,10 +2589,10 @@ object GpuOverrides extends Logging {
     expr[MapKeys](
       "Returns an unordered array containing the keys of the map",
       ExprChecks.unaryProject(
-        TypeSig.ARRAY.nested(TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.NULL +
+        TypeSig.ARRAY.nested(TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL +
          TypeSig.ARRAY + TypeSig.STRUCT).nested(),
        TypeSig.ARRAY.nested(TypeSig.all - TypeSig.MAP), // Maps cannot have other maps as keys
-        TypeSig.MAP.nested(TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.NULL +
+        TypeSig.MAP.nested(TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL +
          TypeSig.ARRAY + TypeSig.STRUCT + TypeSig.MAP),
        TypeSig.MAP.nested(TypeSig.all)),
       (in, conf, p, r) => new UnaryExprMeta[MapKeys](in, conf, p, r) {
@@ -2602,10 +2602,10 @@ object GpuOverrides extends Logging {
     expr[MapValues](
       "Returns an unordered array containing the values of the map",
       ExprChecks.unaryProject(
-        TypeSig.ARRAY.nested(TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.NULL +
+        TypeSig.ARRAY.nested(TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL +
          TypeSig.ARRAY + TypeSig.STRUCT + TypeSig.MAP),
        TypeSig.ARRAY.nested(TypeSig.all),
-        TypeSig.MAP.nested(TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.NULL +
+        TypeSig.MAP.nested(TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL +
          TypeSig.ARRAY + TypeSig.STRUCT + TypeSig.MAP),
        TypeSig.MAP.nested(TypeSig.all)),
       (in, conf, p, r) => new UnaryExprMeta[MapValues](in, conf, p, r) {
@@ -2616,10 +2616,10 @@ object GpuOverrides extends Logging {
       "Returns an unordered array of all entries in the given map",
       ExprChecks.unaryProject(
         // Technically the return type is an array of struct, but we cannot really express that
-        TypeSig.ARRAY.nested(TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.NULL +
+        TypeSig.ARRAY.nested(TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL +
          TypeSig.ARRAY + TypeSig.STRUCT + TypeSig.MAP),
        TypeSig.ARRAY.nested(TypeSig.all),
-        TypeSig.MAP.nested(TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.NULL +
+        TypeSig.MAP.nested(TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL +
          TypeSig.ARRAY + TypeSig.STRUCT + TypeSig.MAP),
        TypeSig.MAP.nested(TypeSig.all)),
       (in, conf, p, r) => new UnaryExprMeta[MapEntries](in, conf, p, r) {
@@ -2629,9 +2629,9 @@ object GpuOverrides extends Logging {
     expr[ArrayMin](
       "Returns the minimum value in the array",
       ExprChecks.unaryProject(
-        TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.NULL,
+        TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL,
        TypeSig.orderable,
-        TypeSig.ARRAY.nested(TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.NULL)
+        TypeSig.ARRAY.nested(TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL)
          .withPsNote(TypeEnum.DOUBLE, nanAggPsNote)
          .withPsNote(TypeEnum.FLOAT, nanAggPsNote),
        TypeSig.ARRAY.nested(TypeSig.orderable)),
@@ -2646,9 +2646,9 @@ object GpuOverrides extends Logging {
     expr[ArrayMax](
       "Returns the maximum value in the array",
       ExprChecks.unaryProject(
-        TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.NULL,
+        TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL,
        TypeSig.orderable,
-        TypeSig.ARRAY.nested(TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.NULL)
+        TypeSig.ARRAY.nested(TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL)
          .withPsNote(TypeEnum.DOUBLE, nanAggPsNote)
          .withPsNote(TypeEnum.FLOAT, nanAggPsNote),
        TypeSig.ARRAY.nested(TypeSig.orderable)),
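ArrayMin and ArrayMax attach nanAggPsNote to DOUBLE and FLOAT, as Max and Min did earlier. Presumably the note concerns NaN handling: Spark SQL orders NaN above every other double value, and min/max aggregation must honor that ordering. A small CPU-side check of the semantics the GPU has to match:

```scala
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.{max, min}

object NanOrderingDemo extends App {
  val spark = SparkSession.builder().master("local[1]").appName("nan-agg").getOrCreate()
  import spark.implicits._

  val df = Seq(1.0, Double.NaN, 99.9).toDF("d")
  // Spark treats NaN as larger than any other double, so max is NaN, min is 1.0.
  df.agg(max($"d"), min($"d")).show()

  spark.stop()
}
```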
@@ -2706,10 +2706,10 @@ object GpuOverrides extends Logging {
     expr[SortArray](
       "Returns a sorted array with the input array and the ascending / descending order",
       ExprChecks.binaryProject(
-        TypeSig.ARRAY.nested(TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL),
+        TypeSig.ARRAY.nested(TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128),
        TypeSig.ARRAY.nested(TypeSig.all),
        ("array", TypeSig.ARRAY.nested(
-          TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL),
+          TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128),
          TypeSig.ARRAY.nested(TypeSig.all)),
        ("ascendingOrder", TypeSig.lit(TypeEnum.BOOLEAN), TypeSig.lit(TypeEnum.BOOLEAN))),
       (sortExpression, conf, p, r) => new BinaryExprMeta[SortArray](sortExpression, conf, p, r) {
@@ -2721,12 +2721,12 @@ object GpuOverrides extends Logging {
     expr[CreateArray](
       "Returns an array with the given elements",
       ExprChecks.projectOnly(
-        TypeSig.ARRAY.nested(TypeSig.gpuNumeric + TypeSig.DECIMAL_128_FULL +
+        TypeSig.ARRAY.nested(TypeSig.gpuNumeric +
          TypeSig.NULL + TypeSig.STRING + TypeSig.BOOLEAN + TypeSig.DATE + TypeSig.TIMESTAMP +
          TypeSig.ARRAY + TypeSig.STRUCT),
        TypeSig.ARRAY.nested(TypeSig.all),
        repeatingParamCheck = Some(RepeatingParamCheck("arg",
-          TypeSig.gpuNumeric + TypeSig.DECIMAL_128_FULL + TypeSig.NULL + TypeSig.STRING +
+          TypeSig.gpuNumeric + TypeSig.NULL + TypeSig.STRING +
            TypeSig.BOOLEAN + TypeSig.DATE + TypeSig.TIMESTAMP + TypeSig.STRUCT +
            TypeSig.ARRAY.nested(TypeSig.gpuNumeric + TypeSig.NULL + TypeSig.STRING +
              TypeSig.BOOLEAN + TypeSig.DATE + TypeSig.TIMESTAMP + TypeSig.STRUCT +
@@ -2749,15 +2749,15 @@ object GpuOverrides extends Logging {
     expr[LambdaFunction](
       "Holds a higher order SQL function",
       ExprChecks.projectOnly(
-        (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.NULL + TypeSig.ARRAY +
+        (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL + TypeSig.ARRAY +
          TypeSig.STRUCT + TypeSig.MAP).nested(),
        TypeSig.all,
        Seq(ParamCheck("function",
-          (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.NULL + TypeSig.ARRAY +
+          (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL + TypeSig.ARRAY +
            TypeSig.STRUCT + TypeSig.MAP).nested(),
          TypeSig.all)),
        Some(RepeatingParamCheck("arguments",
-          (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.NULL + TypeSig.ARRAY +
+          (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL + TypeSig.ARRAY +
            TypeSig.STRUCT + TypeSig.MAP).nested(),
          TypeSig.all))),
       (in, conf, p, r) => new ExprMeta[LambdaFunction](in, conf, p, r) {
@@ -2772,7 +2772,7 @@ object GpuOverrides extends Logging {
     expr[NamedLambdaVariable](
       "A parameter to a higher order SQL function",
       ExprChecks.projectOnly(
-        (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.NULL + TypeSig.ARRAY +
+        (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL + TypeSig.ARRAY +
          TypeSig.STRUCT + TypeSig.MAP).nested(),
        TypeSig.all),
       (in, conf, p, r) => new ExprMeta[NamedLambdaVariable](in, conf, p, r) {
@@ -2784,15 +2784,15 @@ object GpuOverrides extends Logging {
       "Transform elements in an array using the transform function. This is similar to a `map` " +
         "in functional programming",
       ExprChecks.projectOnly(TypeSig.ARRAY.nested(TypeSig.commonCudfTypes +
-        TypeSig.DECIMAL_128_FULL + TypeSig.NULL + TypeSig.ARRAY + TypeSig.STRUCT + TypeSig.MAP),
+        TypeSig.DECIMAL_128 + TypeSig.NULL + TypeSig.ARRAY + TypeSig.STRUCT + TypeSig.MAP),
        TypeSig.ARRAY.nested(TypeSig.all),
        Seq(
          ParamCheck("argument",
-            TypeSig.ARRAY.nested(TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.NULL +
+            TypeSig.ARRAY.nested(TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL +
              TypeSig.ARRAY + TypeSig.STRUCT + TypeSig.MAP),
            TypeSig.ARRAY.nested(TypeSig.all)),
          ParamCheck("function",
-            (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.NULL +
+            (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL +
              TypeSig.ARRAY + TypeSig.STRUCT + TypeSig.MAP).nested(),
            TypeSig.all))),
       (in, conf, p, r) => new ExprMeta[ArrayTransform](in, conf, p, r) {
@@ -2802,17 +2802,17 @@ object GpuOverrides extends Logging {
     expr[TransformKeys](
       "Transform keys in a map using a transform function",
-      ExprChecks.projectOnly(TypeSig.MAP.nested(TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL +
+      ExprChecks.projectOnly(TypeSig.MAP.nested(TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 +
        TypeSig.NULL + TypeSig.ARRAY + TypeSig.STRUCT + TypeSig.MAP),
        TypeSig.MAP.nested(TypeSig.all),
        Seq(
          ParamCheck("argument",
-            TypeSig.MAP.nested(TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.NULL +
+            TypeSig.MAP.nested(TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL +
              TypeSig.ARRAY + TypeSig.STRUCT + TypeSig.MAP),
            TypeSig.MAP.nested(TypeSig.all)),
          ParamCheck("function",
            // We need to be able to check for duplicate keys (equality)
-            TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.NULL,
+            TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL,
            TypeSig.all - TypeSig.MAP.nested()))),
       (in, conf, p, r) => new ExprMeta[TransformKeys](in, conf, p, r) {
         override def tagExprForGpu(): Unit = {
@@ -2829,16 +2829,16 @@ object GpuOverrides extends Logging {
     expr[TransformValues](
       "Transform values in a map using a transform function",
-      ExprChecks.projectOnly(TypeSig.MAP.nested(TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL +
+      ExprChecks.projectOnly(TypeSig.MAP.nested(TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 +
        TypeSig.NULL + TypeSig.ARRAY + TypeSig.STRUCT + TypeSig.MAP),
        TypeSig.MAP.nested(TypeSig.all),
        Seq(
          ParamCheck("argument",
-            TypeSig.MAP.nested(TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.NULL +
+            TypeSig.MAP.nested(TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL +
              TypeSig.ARRAY + TypeSig.STRUCT + TypeSig.MAP),
            TypeSig.MAP.nested(TypeSig.all)),
          ParamCheck("function",
-            (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.NULL +
+            (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL +
              TypeSig.ARRAY + TypeSig.STRUCT + TypeSig.MAP).nested(),
            TypeSig.all))),
       (in, conf, p, r) => new ExprMeta[TransformValues](in, conf, p, r) {
@@ -2963,11 +2963,11 @@ object GpuOverrides extends Logging {
     expr[Concat](
       "List/String concatenate",
       ExprChecks.projectOnly((TypeSig.STRING + TypeSig.ARRAY).nested(
-        TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL),
+        TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128),
        (TypeSig.STRING + TypeSig.BINARY + TypeSig.ARRAY).nested(TypeSig.all),
        repeatingParamCheck = Some(RepeatingParamCheck("input",
          (TypeSig.STRING + TypeSig.ARRAY).nested(
-            TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL),
+            TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128),
          (TypeSig.STRING + TypeSig.BINARY + TypeSig.ARRAY).nested(TypeSig.all)))),
       (a, conf, p, r) => new ComplexTypeMergingExprMeta[Concat](a, conf, p, r) {
         override def convertToGpu(child: Seq[Expression]): GpuExpression = GpuConcat(child)
@@ -3051,7 +3051,7 @@ object GpuOverrides extends Logging {
       "The size of an array or a map",
       ExprChecks.unaryProject(TypeSig.INT, TypeSig.INT,
         (TypeSig.ARRAY + TypeSig.MAP).nested(TypeSig.commonCudfTypes + TypeSig.NULL
-          + TypeSig.DECIMAL_128_FULL + TypeSig.ARRAY + TypeSig.STRUCT + TypeSig.MAP),
+          + TypeSig.DECIMAL_128 + TypeSig.ARRAY + TypeSig.STRUCT + TypeSig.MAP),
        (TypeSig.ARRAY + TypeSig.MAP).nested(TypeSig.all)),
       (a, conf, p, r) => new UnaryExprMeta[Size](a, conf, p, r) {
         override def convertToGpu(child: Expression): GpuExpression =
@@ -3060,14 +3060,14 @@ object GpuOverrides extends Logging {
     expr[UnscaledValue](
       "Convert a Decimal to an unscaled long value for some aggregation optimizations",
       ExprChecks.unaryProject(TypeSig.LONG, TypeSig.LONG,
-        TypeSig.DECIMAL_64, TypeSig.DECIMAL_128_FULL),
+        TypeSig.DECIMAL_64, TypeSig.DECIMAL_128),
       (a, conf, p, r) => new UnaryExprMeta[UnscaledValue](a, conf, p, r) {
         override val isFoldableNonLitAllowed: Boolean = true
         override def convertToGpu(child: Expression): GpuExpression =
           GpuUnscaledValue(child)
      }),
     expr[MakeDecimal](
       "Create a Decimal from an unscaled long value for some aggregation optimizations",
-      ExprChecks.unaryProject(TypeSig.DECIMAL_64, TypeSig.DECIMAL_128_FULL,
+      ExprChecks.unaryProject(TypeSig.DECIMAL_64, TypeSig.DECIMAL_128,
        TypeSig.LONG, TypeSig.LONG),
       (a, conf, p, r) => new UnaryExprMeta[MakeDecimal](a, conf, p, r) {
         override def convertToGpu(child: Expression): GpuExpression =
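UnscaledValue and MakeDecimal above move between a decimal and its unscaled long for aggregation optimizations; the GPU checks accept only DECIMAL_64 on the decimal side because only decimals of up to 18 digits fit in a 64-bit unscaled value. The round trip itself in plain Scala, using java.math directly:

```scala
import java.math.{BigDecimal => JBigDecimal, BigInteger}

object UnscaledRoundTrip extends App {
  val d = new JBigDecimal("123.45") // conceptually DECIMAL(5, 2)

  // UnscaledValue-style extraction: 123.45 at scale 2 has unscaled value 12345.
  val unscaled: Long = d.unscaledValue().longValueExact()

  // MakeDecimal-style reconstruction: reattach the scale to the unscaled long.
  val back = new JBigDecimal(BigInteger.valueOf(unscaled), 2)

  println(unscaled) // 12345
  println(back)     // 123.45
}
```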
@@ -3078,11 +3078,11 @@ object GpuOverrides extends Logging {
       ExprChecks.unaryProject(
         // Here is a workaround representation, since multi-level nested type is not supported yet.
         // related issue: https://github.com/NVIDIA/spark-rapids/issues/1901
-        TypeSig.ARRAY.nested(TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL +
+        TypeSig.ARRAY.nested(TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128 +
          TypeSig.ARRAY + TypeSig.STRUCT + TypeSig.MAP),
        TypeSig.ARRAY.nested(TypeSig.all),
        (TypeSig.ARRAY + TypeSig.MAP).nested(TypeSig.commonCudfTypes + TypeSig.NULL +
-          TypeSig.DECIMAL_128_FULL + TypeSig.ARRAY + TypeSig.STRUCT + TypeSig.MAP),
+          TypeSig.DECIMAL_128 + TypeSig.ARRAY + TypeSig.STRUCT + TypeSig.MAP),
        (TypeSig.ARRAY + TypeSig.MAP).nested(TypeSig.all)),
       (a, conf, p, r) => new GeneratorExprMeta[Explode](a, conf, p, r) {
         override val supportOuter: Boolean = true
@@ -3093,11 +3093,11 @@ object GpuOverrides extends Logging {
       ExprChecks.unaryProject(
         // Here is a workaround representation, since multi-level nested type is not supported yet.
         // related issue: https://github.com/NVIDIA/spark-rapids/issues/1901
-        TypeSig.ARRAY.nested(TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL +
+        TypeSig.ARRAY.nested(TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128 +
          TypeSig.ARRAY + TypeSig.STRUCT + TypeSig.MAP),
        TypeSig.ARRAY.nested(TypeSig.all),
        (TypeSig.ARRAY + TypeSig.MAP).nested(TypeSig.commonCudfTypes + TypeSig.NULL +
-          TypeSig.DECIMAL_128_FULL + TypeSig.ARRAY + TypeSig.STRUCT + TypeSig.MAP),
+          TypeSig.DECIMAL_128 + TypeSig.ARRAY + TypeSig.STRUCT + TypeSig.MAP),
        (TypeSig.ARRAY + TypeSig.MAP).nested(TypeSig.all)),
       (a, conf, p, r) => new GeneratorExprMeta[PosExplode](a, conf, p, r) {
         override val supportOuter: Boolean = true
@@ -3109,11 +3109,11 @@ object GpuOverrides extends Logging {
         // The plan is optimized to run HashAggregate on the rows to be replicated.
         // HashAggregateExec doesn't support grouping by 128-bit decimal value yet.
         // Issue to track decimal 128 support: https://github.com/NVIDIA/spark-rapids/issues/4410
-        TypeSig.ARRAY.nested(TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL +
+        TypeSig.ARRAY.nested(TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128 +
          TypeSig.ARRAY + TypeSig.STRUCT),
        TypeSig.ARRAY.nested(TypeSig.all),
        repeatingParamCheck = Some(RepeatingParamCheck("input",
-          (TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL +
+          (TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128 +
            TypeSig.ARRAY + TypeSig.STRUCT).nested(),
          TypeSig.all))),
       (a, conf, p, r) => new ReplicateRowsExprMeta[ReplicateRows](a, conf, p, r) {
@@ -3124,11 +3124,11 @@ object GpuOverrides extends Logging {
       "Collect a list of non-unique elements, not supported in reduction",
       // GpuCollectList is not yet supported in Reduction context.
       ExprChecks.aggNotReduction(
-        TypeSig.ARRAY.nested(TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL +
+        TypeSig.ARRAY.nested(TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 +
          TypeSig.NULL + TypeSig.STRUCT + TypeSig.ARRAY + TypeSig.MAP),
        TypeSig.ARRAY.nested(TypeSig.all),
        Seq(ParamCheck("input",
-          (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL +
+          (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 +
            TypeSig.NULL + TypeSig.STRUCT + TypeSig.ARRAY + TypeSig.MAP).nested(),
          TypeSig.all))),
       (c, conf, p, r) => new TypedImperativeAggExprMeta[CollectList](c, conf, p, r) {
@@ -3157,11 +3157,11 @@ object GpuOverrides extends Logging {
       // Compared to CollectList, StructType is NOT in GpuCollectSet because underlying
       // method drop_list_duplicates doesn't support nested types.
       ExprChecks.aggNotReduction(
-        TypeSig.ARRAY.nested(TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL +
+        TypeSig.ARRAY.nested(TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 +
          TypeSig.NULL + TypeSig.STRUCT),
        TypeSig.ARRAY.nested(TypeSig.all),
        Seq(ParamCheck("input",
-          (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL +
+          (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 +
            TypeSig.NULL + TypeSig.STRUCT).nested(),
          TypeSig.all))),
       (c, conf, p, r) => new TypedImperativeAggExprMeta[CollectSet](c, conf, p, r) {
@@ -3234,14 +3234,14 @@ object GpuOverrides extends Logging {
       ExprChecks.groupByOnly(
         // note that output can be single number or array depending on whether percentiles param
         // is a single number or an array
-        TypeSig.gpuNumeric + TypeSig.DECIMAL_128_FULL +
-          TypeSig.ARRAY.nested(TypeSig.gpuNumeric + TypeSig.DECIMAL_128_FULL),
-        TypeSig.numeric + TypeSig.DATE + TypeSig.TIMESTAMP + TypeSig.ARRAY.nested(
-          TypeSig.numeric + TypeSig.DATE + TypeSig.TIMESTAMP),
+        TypeSig.gpuNumeric +
+          TypeSig.ARRAY.nested(TypeSig.gpuNumeric),
+        TypeSig.cpuNumeric + TypeSig.DATE + TypeSig.TIMESTAMP + TypeSig.ARRAY.nested(
+          TypeSig.cpuNumeric + TypeSig.DATE + TypeSig.TIMESTAMP),
        Seq(
          ParamCheck("input",
-            TypeSig.gpuNumeric + TypeSig.DECIMAL_128_FULL,
-            TypeSig.numeric + TypeSig.DATE + TypeSig.TIMESTAMP),
+            TypeSig.gpuNumeric,
+            TypeSig.cpuNumeric + TypeSig.DATE + TypeSig.TIMESTAMP),
          ParamCheck("percentage",
            TypeSig.DOUBLE + TypeSig.ARRAY.nested(TypeSig.DOUBLE),
            TypeSig.DOUBLE + TypeSig.ARRAY.nested(TypeSig.DOUBLE)),
@@ -3296,8 +3296,8 @@ object GpuOverrides extends Logging {
     expr[org.apache.spark.sql.execution.ScalarSubquery](
       "Subquery that will return only one row and one column",
       ExprChecks.projectOnly(
-        TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL,
-        TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL,
+        TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128,
+        TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128,
        Nil, None),
       (a, conf, p, r) =>
        new ExprMeta[org.apache.spark.sql.execution.ScalarSubquery](a, conf, p, r) {
@@ -3385,7 +3385,7 @@ object GpuOverrides extends Logging {
     part[RangePartitioning](
       "Range partitioning",
       PartChecks(RepeatingParamCheck("order_key",
-        (pluginSupportedOrderableSig + TypeSig.DECIMAL_128_FULL + TypeSig.STRUCT).nested(),
+        (pluginSupportedOrderableSig + TypeSig.DECIMAL_128 + TypeSig.STRUCT).nested(),
        TypeSig.orderable)),
       (rp, conf, p, r) => new PartMeta[RangePartitioning](rp, conf, p, r) {
         override val childExprs: Seq[BaseExprMeta[_]] =
@@ -3446,7 +3446,7 @@ object GpuOverrides extends Logging {
     exec[GenerateExec] (
       "The backend for operations that generate more output rows than input rows like explode",
       ExecChecks(
-        (TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL + TypeSig.ARRAY +
+        (TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128 + TypeSig.ARRAY +
          TypeSig.STRUCT + TypeSig.MAP).nested(),
        TypeSig.all),
       (gen, conf, p, r) => new GpuGenerateExecSparkPlanMeta(gen, conf, p, r)),
@@ -3454,7 +3454,7 @@ object GpuOverrides extends Logging {
       "The backend for most select, withColumn and dropColumn statements",
       ExecChecks(
         (TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.STRUCT + TypeSig.MAP +
-          TypeSig.ARRAY + TypeSig.DECIMAL_128_FULL).nested(),
+          TypeSig.ARRAY + TypeSig.DECIMAL_128).nested(),
        TypeSig.all),
       (proj, conf, p, r) => new GpuProjectExecMeta(proj, conf, p, r)),
     exec[RangeExec](
@@ -3471,7 +3471,7 @@ object GpuOverrides extends Logging {
       "The backend for most file input",
       ExecChecks(
         (TypeSig.commonCudfTypes + TypeSig.STRUCT + TypeSig.MAP + TypeSig.ARRAY +
-          TypeSig.DECIMAL_128_FULL).nested(),
+          TypeSig.DECIMAL_128).nested(),
        TypeSig.all),
       (p, conf, parent, r) => new SparkPlanMeta[BatchScanExec](p, conf, parent, r) {
         override val childScans: scala.Seq[ScanMeta[_]] =
@@ -3482,7 +3482,7 @@ object GpuOverrides extends Logging {
      }),
     exec[CoalesceExec](
       "The backend for the dataframe coalesce method",
-      ExecChecks((_gpuCommonTypes + TypeSig.DECIMAL_128_FULL + TypeSig.STRUCT + TypeSig.ARRAY +
+      ExecChecks((_gpuCommonTypes + TypeSig.DECIMAL_128 + TypeSig.STRUCT + TypeSig.ARRAY +
        TypeSig.MAP).nested(),
        TypeSig.all),
       (coalesce, conf, parent, r) => new SparkPlanMeta[CoalesceExec](coalesce, conf, parent, r) {
@@ -3491,7 +3491,7 @@ object GpuOverrides extends Logging {
      }),
     exec[DataWritingCommandExec](
       "Writing data",
-      ExecChecks((TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL.withPsNote(
+      ExecChecks((TypeSig.commonCudfTypes + TypeSig.DECIMAL_128.withPsNote(
        TypeEnum.DECIMAL, "128bit decimal only supported for Orc and Parquet") +
        TypeSig.STRUCT.withPsNote(TypeEnum.STRUCT, "Only supported for Parquet") +
        TypeSig.MAP.withPsNote(TypeEnum.MAP, "Only supported for Parquet") +
@@ -3509,7 +3509,7 @@ object GpuOverrides extends Logging {
       "Take the first limit elements as defined by the sortOrder, and do projection if needed",
       // The SortOrder TypeSig will govern what types can actually be used as sorting key data type.
       // The types below are allowed as inputs and outputs.
-      ExecChecks((pluginSupportedOrderableSig + TypeSig.DECIMAL_128_FULL +
+      ExecChecks((pluginSupportedOrderableSig + TypeSig.DECIMAL_128 +
        TypeSig.ARRAY + TypeSig.STRUCT + TypeSig.MAP).nested(),
        TypeSig.all),
       (takeExec, conf, p, r) =>
        new SparkPlanMeta[TakeOrderedAndProjectExec](takeExec, conf, p, r) {
@@ -3545,7 +3545,7 @@ object GpuOverrides extends Logging {
      }),
     exec[LocalLimitExec](
       "Per-partition limiting of results",
-      ExecChecks((TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.NULL +
+      ExecChecks((TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL +
        TypeSig.STRUCT + TypeSig.ARRAY + TypeSig.MAP).nested(),
        TypeSig.all),
       (localLimitExec, conf, p, r) =>
@@ -3555,7 +3555,7 @@ object GpuOverrides extends Logging {
      }),
     exec[GlobalLimitExec](
       "Limiting of results across partitions",
-      ExecChecks((TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.NULL +
+      ExecChecks((TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL +
        TypeSig.STRUCT + TypeSig.ARRAY + TypeSig.MAP).nested(),
        TypeSig.all),
       (globalLimitExec, conf, p, r) =>
@@ -3565,7 +3565,7 @@ object GpuOverrides extends Logging {
      }),
     exec[CollectLimitExec](
       "Reduce to single partition and apply limit",
-      ExecChecks((TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.NULL +
+      ExecChecks((TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL +
        TypeSig.STRUCT + TypeSig.ARRAY + TypeSig.MAP).nested(),
        TypeSig.all),
       (collectLimitExec, conf, p, r) => new GpuCollectLimitMeta(collectLimitExec, conf, p, r))
@@ -3575,14 +3575,14 @@ object GpuOverrides extends Logging {
     exec[FilterExec](
       "The backend for most filter statements",
       ExecChecks((TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.STRUCT + TypeSig.MAP +
-        TypeSig.ARRAY + TypeSig.DECIMAL_128_FULL).nested(), TypeSig.all),
+        TypeSig.ARRAY + TypeSig.DECIMAL_128).nested(), TypeSig.all),
       (filter, conf, p, r) => new SparkPlanMeta[FilterExec](filter, conf, p, r) {
         override def convertToGpu(): GpuExec =
           GpuFilterExec(childExprs.head.convertToGpu(),
             childPlans.head.convertIfNeeded())
      }),
     exec[ShuffleExchangeExec](
       "The backend for most data being exchanged between processes",
-      ExecChecks((TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL +
+      ExecChecks((TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128 +
        TypeSig.ARRAY + TypeSig.STRUCT + TypeSig.MAP).nested()
        .withPsNote(TypeEnum.STRUCT, "Round-robin partitioning is not supported for nested " +
          s"structs if ${SQLConf.SORT_BEFORE_REPARTITION.key} is true")
@@ -3594,7 +3594,7 @@ object GpuOverrides extends Logging {
       (shuffle, conf, p, r) => new GpuShuffleMeta(shuffle, conf, p, r)),
     exec[UnionExec](
       "The backend for the union operator",
-      ExecChecks((TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL +
+      ExecChecks((TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128 +
        TypeSig.MAP + TypeSig.ARRAY + TypeSig.STRUCT).nested()
        .withPsNote(TypeEnum.STRUCT,
          "unionByName will not optionally impute nulls for missing struct fields " +
@@ -3605,9 +3605,9 @@ object GpuOverrides extends Logging {
      }),
     exec[BroadcastExchangeExec](
       "The backend for broadcast exchange of data",
-      ExecChecks((TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL +
+      ExecChecks((TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128 +
        TypeSig.ARRAY + TypeSig.STRUCT + TypeSig.MAP).nested(TypeSig.commonCudfTypes +
-          TypeSig.NULL + TypeSig.DECIMAL_128_FULL + TypeSig.STRUCT),
+          TypeSig.NULL + TypeSig.DECIMAL_128 + TypeSig.STRUCT),
        TypeSig.all),
       (exchange, conf, p, r) => new GpuBroadcastMeta(exchange, conf, p, r)),
     exec[BroadcastHashJoinExec](
@@ -3622,9 +3622,9 @@ object GpuOverrides extends Logging {
       (join, conf, p, r) => new GpuBroadcastNestedLoopJoinMeta(join, conf, p, r)),
     exec[CartesianProductExec](
       "Implementation of join using brute force",
-      ExecChecks((TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL +
+      ExecChecks((TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128 +
        TypeSig.ARRAY + TypeSig.MAP + TypeSig.STRUCT)
-        .nested(TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL +
+        .nested(TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128 +
          TypeSig.ARRAY + TypeSig.MAP + TypeSig.STRUCT),
        TypeSig.all),
       (join, conf, p, r) => new SparkPlanMeta[CartesianProductExec](join, conf, p, r) {
@@ -3648,7 +3648,7 @@ object GpuOverrides extends Logging {
     exec[HashAggregateExec](
       "The backend for hash based aggregations",
       ExecChecks(
-        (TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL +
+        (TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128 +
          TypeSig.MAP + TypeSig.ARRAY + TypeSig.STRUCT)
          .nested()
          .withPsNote(TypeEnum.ARRAY, "not allowed for grouping expressions")
@@ -3662,7 +3662,7 @@ object GpuOverrides extends Logging {
       ExecChecks(
         // note that binary input is allowed here but there are additional checks later on to
         // check that we can support binary in the context of aggregate buffer conversions
-        (TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL +
+        (TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128 +
          TypeSig.MAP + TypeSig.ARRAY + TypeSig.STRUCT + TypeSig.BINARY)
          .nested()
          .withPsNote(TypeEnum.BINARY, "only allowed when aggregate buffers can be " +
@@ -3680,7 +3680,7 @@ object GpuOverrides extends Logging {
     exec[SortAggregateExec](
       "The backend for sort based aggregations",
       ExecChecks(
-        (TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL +
+        (TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128 +
          TypeSig.MAP + TypeSig.ARRAY + TypeSig.STRUCT + TypeSig.BINARY)
          .nested()
          .withPsNote(TypeEnum.BINARY, "only allowed when aggregate buffers can be " +
@@ -3695,7 +3695,7 @@ object GpuOverrides extends Logging {
       "The backend for the sort operator",
       // The SortOrder TypeSig will govern what types can actually be used as sorting key data type.
       // The types below are allowed as inputs and outputs.
-      ExecChecks((pluginSupportedOrderableSig + TypeSig.DECIMAL_128_FULL + TypeSig.ARRAY +
+      ExecChecks((pluginSupportedOrderableSig + TypeSig.DECIMAL_128 + TypeSig.ARRAY +
        TypeSig.STRUCT + TypeSig.MAP + TypeSig.BINARY).nested(), TypeSig.all),
       (sort, conf, p, r) => new GpuSortMeta(sort, conf, p, r)),
     exec[SortMergeJoinExec](
@@ -3711,7 +3711,7 @@ object GpuOverrides extends Logging {
       (expand, conf, p, r) => new GpuExpandExecMeta(expand, conf, p, r)),
     exec[WindowExec](
       "Window-operator backend",
-      ExecChecks((TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL +
+      ExecChecks((TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128 +
        TypeSig.STRUCT + TypeSig.ARRAY + TypeSig.MAP).nested(),
        TypeSig.all,
        Map("partitionSpec" ->
@@ -3722,7 +3722,7 @@ object GpuOverrides extends Logging {
     exec[SampleExec](
       "The backend for the sample operator",
       ExecChecks((TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.STRUCT + TypeSig.MAP +
-        TypeSig.ARRAY + TypeSig.DECIMAL_128_FULL).nested(), TypeSig.all),
+        TypeSig.ARRAY + TypeSig.DECIMAL_128).nested(), TypeSig.all),
       (sample, conf, p, r) => new GpuSampleExecMeta(sample, conf, p, r)
     ),
     exec[SubqueryBroadcastExec](
diff --git a/sql-plugin/src/main/scala/com/nvidia/spark/rapids/GpuUserDefinedFunction.scala b/sql-plugin/src/main/scala/com/nvidia/spark/rapids/GpuUserDefinedFunction.scala
index 0dc5c75f743..ea0efff23e2 100644
--- a/sql-plugin/src/main/scala/com/nvidia/spark/rapids/GpuUserDefinedFunction.scala
+++ b/sql-plugin/src/main/scala/com/nvidia/spark/rapids/GpuUserDefinedFunction.scala
@@ -75,7 +75,7 @@ trait GpuUserDefinedFunction extends GpuExpression
 object GpuUserDefinedFunction {
   // UDFs can support all types except UDT which does not have a clear columnar representation.
-  val udfTypeSig: TypeSig = (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.NULL +
+  val udfTypeSig: TypeSig = (TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL +
     TypeSig.BINARY + TypeSig.CALENDAR + TypeSig.ARRAY + TypeSig.MAP + TypeSig.STRUCT).nested()

   /** (This will be initialized once per process) */
diff --git a/sql-plugin/src/main/scala/com/nvidia/spark/rapids/TypeChecks.scala b/sql-plugin/src/main/scala/com/nvidia/spark/rapids/TypeChecks.scala
index 1706b305fca..75dedf38a2a 100644
--- a/sql-plugin/src/main/scala/com/nvidia/spark/rapids/TypeChecks.scala
+++ b/sql-plugin/src/main/scala/com/nvidia/spark/rapids/TypeChecks.scala
@@ -566,7 +566,7 @@ object TypeSig {
    * slightly less than full DECIMAL support. These are things like math operations where
    * we cannot replicate the overflow behavior of Spark. These will be added when needed.
    */
-  val DECIMAL_128_FULL: TypeSig = decimal(DType.DECIMAL128_MAX_PRECISION)
+  val DECIMAL_128: TypeSig = decimal(DType.DECIMAL128_MAX_PRECISION)

   val NULL: TypeSig = new TypeSig(TypeEnum.ValueSet(TypeEnum.NULL))
   val BINARY: TypeSig = new TypeSig(TypeEnum.ValueSet(TypeEnum.BINARY))
@@ -620,12 +620,12 @@ object TypeSig {
   /**
    * All numeric types fp + integral + DECIMAL_64
    */
-  val gpuNumeric: TypeSig = integral + fp + DECIMAL_64
+  val gpuNumeric: TypeSig = integral + fp + DECIMAL_128

   /**
-   * All numeric types fp + integral + DECIMAL_128_FULL
+   * All numeric types fp + integral + DECIMAL_128
    */
-  val numeric: TypeSig = integral + fp + DECIMAL_128_FULL
+  val cpuNumeric: TypeSig = integral + fp + DECIMAL_128

   /**
    * All values that correspond to Spark's AtomicType but supported by GPU
@@ -635,7 +635,7 @@ object TypeSig {
   /**
    * All values that correspond to Spark's AtomicType
    */
-  val atomics: TypeSig = numeric + BINARY + BOOLEAN + DATE + STRING + TIMESTAMP
+  val cpuAtomics: TypeSig = cpuNumeric + BINARY + BOOLEAN + DATE + STRING + TIMESTAMP

   /**
    * numeric + CALENDAR but only for GPU
@@ -657,7 +657,7 @@ object TypeSig {
    * All types that Spark supports sorting/ordering on (really everything but MAP)
    */
   val orderable: TypeSig = (BOOLEAN + BYTE + SHORT + INT + LONG + FLOAT + DOUBLE + DATE +
-    TIMESTAMP + STRING + DECIMAL_128_FULL + NULL + BINARY + CALENDAR + ARRAY + STRUCT +
+    TIMESTAMP + STRING + DECIMAL_128 + NULL + BINARY + CALENDAR + ARRAY + STRUCT +
     UDT).nested()

   /**
@@ -665,7 +665,7 @@ object TypeSig {
    * to https://spark.apache.org/docs/latest/api/sql/index.html#_12), e.g. "<=>", "=", "==".
    */
   val comparable: TypeSig = (BOOLEAN + BYTE + SHORT + INT + LONG + FLOAT + DOUBLE + DATE +
-    TIMESTAMP + STRING + DECIMAL_128_FULL + NULL + BINARY + CALENDAR + ARRAY + STRUCT +
+    TIMESTAMP + STRING + DECIMAL_128 + NULL + BINARY + CALENDAR + ARRAY + STRUCT +
     UDT).nested()

   /**
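The hunks above are the core of this patch: DECIMAL_128_FULL becomes DECIMAL_128, gpuNumeric is widened from DECIMAL_64 to DECIMAL_128, and the Spark-side signatures are renamed numeric → cpuNumeric and atomics → cpuAtomics so every name states which engine it describes. A toy model of the set algebra involved (names are invented for illustration; the real TypeSig also tracks nesting, literals, and notes):

```scala
// Toy stand-in for TypeSig: a signature is just a set of type tags composed with ++.
object TypeSigModel extends App {
  type Sig = Set[String]

  val integral: Sig = Set("BYTE", "SHORT", "INT", "LONG")
  val fp: Sig = Set("FLOAT", "DOUBLE")
  val DECIMAL_128: Sig = Set("DECIMAL") // up to 38 digits in the real plugin

  // After this patch both signatures carry 128-bit decimal, and the names say
  // which side they describe: what the GPU supports vs. what Spark accepts.
  val gpuNumeric: Sig = integral ++ fp ++ DECIMAL_128
  val cpuNumeric: Sig = integral ++ fp ++ DECIMAL_128

  def isSupported(sig: Sig, dataType: String): Boolean = sig.contains(dataType)

  println(isSupported(gpuNumeric, "DECIMAL")) // true: DECIMAL_64 is no longer the ceiling
}
```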
@@ -1050,7 +1050,7 @@ case class ExprChecksImpl(contexts: Map[ExpressionContext, ContextChecks])
  * This is specific to CaseWhen, because it does not follow the typical parameter convention.
  */
 object CaseWhenCheck extends ExprChecks {
-  val check: TypeSig = (TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL +
+  val check: TypeSig = (TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128 +
     TypeSig.ARRAY + TypeSig.STRUCT + TypeSig.MAP).nested()

   val sparkSig: TypeSig = TypeSig.all
@@ -1101,8 +1101,8 @@ object CaseWhenCheck extends ExprChecks {
  */
 object WindowSpecCheck extends ExprChecks {
   val check: TypeSig =
-    TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.NULL +
-      TypeSig.STRUCT.nested(TypeSig.commonCudfTypes + TypeSig.DECIMAL_128_FULL + TypeSig.NULL)
+    TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL +
+      TypeSig.STRUCT.nested(TypeSig.commonCudfTypes + TypeSig.DECIMAL_128 + TypeSig.NULL)
   val sparkSig: TypeSig = TypeSig.all

   override def tagAst(meta: BaseExprMeta[_]): Unit = {
@@ -1146,11 +1146,11 @@ object CreateMapCheck extends ExprChecks {

   // Spark supports all types except for Map for key (Map is not supported
   // even in child types)
-  private val keySig: TypeSig = (TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL +
+  private val keySig: TypeSig = (TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128 +
     TypeSig.ARRAY + TypeSig.STRUCT).nested()

   private val valueSig: TypeSig = (TypeSig.commonCudfTypes + TypeSig.NULL +
-    TypeSig.DECIMAL_128_FULL + TypeSig.ARRAY + TypeSig.MAP + TypeSig.STRUCT).nested()
+    TypeSig.DECIMAL_128 + TypeSig.ARRAY + TypeSig.MAP + TypeSig.STRUCT).nested()

   override def tagAst(meta: BaseExprMeta[_]): Unit = {
     meta.willNotWorkInAst("CreateMap is not supported by AST")
@@ -1181,7 +1181,7 @@ object CreateMapCheck extends ExprChecks {
 object CreateNamedStructCheck extends ExprChecks {
   val nameSig: TypeSig = TypeSig.lit(TypeEnum.STRING)
   val sparkNameSig: TypeSig = TypeSig.lit(TypeEnum.STRING)
-  val valueSig: TypeSig = (TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128_FULL +
+  val valueSig: TypeSig = (TypeSig.commonCudfTypes + TypeSig.NULL + TypeSig.DECIMAL_128 +
     TypeSig.ARRAY + TypeSig.MAP + TypeSig.STRUCT).nested()
   val sparkValueSig: TypeSig = TypeSig.all
   val resultSig: TypeSig = TypeSig.STRUCT.nested(valueSig)
@@ -1235,50 +1235,50 @@ class CastChecks extends ExprChecks {
   // When updating these please check child classes too
   import TypeSig._
   val nullChecks: TypeSig = integral + fp + BOOLEAN + TIMESTAMP + DATE + STRING +
-    NULL + DECIMAL_128_FULL
+    NULL + DECIMAL_128
   val sparkNullSig: TypeSig = all

-  val booleanChecks: TypeSig = integral + fp + BOOLEAN + TIMESTAMP + STRING + DECIMAL_128_FULL
-  val sparkBooleanSig: TypeSig = numeric + BOOLEAN + TIMESTAMP + STRING
+  val booleanChecks: TypeSig = integral + fp + BOOLEAN + TIMESTAMP + STRING + DECIMAL_128
+  val sparkBooleanSig: TypeSig = cpuNumeric + BOOLEAN + TIMESTAMP + STRING

   val integralChecks: TypeSig = gpuNumeric + BOOLEAN + TIMESTAMP + STRING +
-    BINARY + DECIMAL_128_FULL
-  val sparkIntegralSig: TypeSig = numeric + BOOLEAN + TIMESTAMP + STRING + BINARY
+    BINARY
+  val sparkIntegralSig: TypeSig = cpuNumeric + BOOLEAN + TIMESTAMP + STRING + BINARY

   val fpToStringPsNote: String = s"Conversion may produce different results and requires " +
     s"${RapidsConf.ENABLE_CAST_FLOAT_TO_STRING} to be true."
-  val fpChecks: TypeSig = (gpuNumeric + BOOLEAN + TIMESTAMP + STRING + DECIMAL_128_FULL)
+  val fpChecks: TypeSig = (gpuNumeric + BOOLEAN + TIMESTAMP + STRING)
     .withPsNote(TypeEnum.STRING, fpToStringPsNote)
-  val sparkFpSig: TypeSig = numeric + BOOLEAN + TIMESTAMP + STRING
+  val sparkFpSig: TypeSig = cpuNumeric + BOOLEAN + TIMESTAMP + STRING

   val dateChecks: TypeSig = integral + fp + BOOLEAN + TIMESTAMP + DATE + STRING
-  val sparkDateSig: TypeSig = numeric + BOOLEAN + TIMESTAMP + DATE + STRING
+  val sparkDateSig: TypeSig = cpuNumeric + BOOLEAN + TIMESTAMP + DATE + STRING

   val timestampChecks: TypeSig = integral + fp + BOOLEAN + TIMESTAMP + DATE + STRING
-  val sparkTimestampSig: TypeSig = numeric + BOOLEAN + TIMESTAMP + DATE + STRING
+  val sparkTimestampSig: TypeSig = cpuNumeric + BOOLEAN + TIMESTAMP + DATE + STRING

   val stringChecks: TypeSig = gpuNumeric + BOOLEAN + TIMESTAMP + DATE + STRING +
-    BINARY + DECIMAL_128_FULL
-  val sparkStringSig: TypeSig = numeric + BOOLEAN + TIMESTAMP + DATE + CALENDAR + STRING + BINARY
+    BINARY
+  val sparkStringSig: TypeSig = cpuNumeric + BOOLEAN + TIMESTAMP + DATE + CALENDAR + STRING + BINARY

   val binaryChecks: TypeSig = none
   val sparkBinarySig: TypeSig = STRING + BINARY

-  val decimalChecks: TypeSig = gpuNumeric + DECIMAL_128_FULL + STRING
-  val sparkDecimalSig: TypeSig = numeric + BOOLEAN + TIMESTAMP + STRING
+  val decimalChecks: TypeSig = gpuNumeric + STRING
+  val sparkDecimalSig: TypeSig = cpuNumeric + BOOLEAN + TIMESTAMP + STRING

   val calendarChecks: TypeSig = none
   val sparkCalendarSig: TypeSig = CALENDAR + STRING

   val arrayChecks: TypeSig = psNote(TypeEnum.STRING, "the array's child type must also support " +
-    "being cast to string") + ARRAY.nested(commonCudfTypes + DECIMAL_128_FULL + NULL +
-    ARRAY + BINARY + STRUCT + MAP) +
-    psNote(TypeEnum.ARRAY, "The array's child type must also support being cast to the " +
-      "desired child type(s)")
+    "being cast to string") + ARRAY.nested(commonCudfTypes + DECIMAL_128 + NULL +
+    ARRAY + BINARY + STRUCT + MAP) +
+    psNote(TypeEnum.ARRAY, "The array's child type must also support being cast to the " +
+      "desired child type(s)")

   val sparkArraySig: TypeSig = STRING + ARRAY.nested(all)

-  val mapChecks: TypeSig = MAP.nested(commonCudfTypes + DECIMAL_128_FULL + NULL + ARRAY + BINARY +
+  val mapChecks: TypeSig = MAP.nested(commonCudfTypes + DECIMAL_128 + NULL + ARRAY + BINARY +
     STRUCT + MAP) +
     psNote(TypeEnum.MAP, "the map's key and value must also support being cast to the " +
       "desired child types")
@@ -1286,7 +1286,7 @@ class CastChecks extends ExprChecks {

   val structChecks: TypeSig = psNote(TypeEnum.STRING, "the struct's children must also support " +
       "being cast to string") +
-    STRUCT.nested(commonCudfTypes + DECIMAL_128_FULL + NULL + ARRAY + BINARY + STRUCT + MAP) +
+    STRUCT.nested(commonCudfTypes + DECIMAL_128 + NULL + ARRAY + BINARY + STRUCT + MAP) +
     psNote(TypeEnum.STRUCT, "the struct's children must also support being cast to the " +
       "desired child type(s)")
   val sparkStructSig: TypeSig = STRING + STRUCT.nested(all)
diff --git a/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/execution/GpuHashJoin.scala b/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/execution/GpuHashJoin.scala
index e67c9ae81db..4a51741b471 100644
--- a/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/execution/GpuHashJoin.scala
+++ b/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/execution/GpuHashJoin.scala
@@ -62,7 +62,7 @@ object JoinTypeChecks {
   private[this] val sparkSupportedJoinKeyTypes = TypeSig.all - TypeSig.MAP.nested()

   private[this] val joinRideAlongTypes =
-    (cudfSupportedKeyTypes + TypeSig.DECIMAL_128_FULL + TypeSig.ARRAY + TypeSig.MAP).nested()
+    (cudfSupportedKeyTypes + TypeSig.DECIMAL_128 + TypeSig.ARRAY + TypeSig.MAP).nested()

   val equiJoinExecChecks: ExecChecks = ExecChecks(
     joinRideAlongTypes,
diff --git a/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/execution/GpuShuffleExchangeExecBase.scala b/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/execution/GpuShuffleExchangeExecBase.scala
index b0e88e25136..724517de73d 100644
--- a/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/execution/GpuShuffleExchangeExecBase.scala
+++ b/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/execution/GpuShuffleExchangeExecBase.scala
@@ -84,7 +84,7 @@ class GpuShuffleMeta(
       case _: RoundRobinPartitioning
         if ShimLoader.getSparkShims.sessionFromPlan(shuffle).sessionState.conf
             .sortBeforeRepartition =>
-        val orderableTypes = GpuOverrides.pluginSupportedOrderableSig + TypeSig.DECIMAL_128_FULL
+        val orderableTypes = GpuOverrides.pluginSupportedOrderableSig + TypeSig.DECIMAL_128
         shuffle.output.map(_.dataType)
           .filterNot(orderableTypes.isSupportedByPlugin)
           .foreach { dataType =>
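The final hunk (truncated here) updates GpuShuffleMeta: when sortBeforeRepartition is enabled, Spark sorts rows before a round-robin repartition so that task retries redistribute them deterministically, which means every output column type must be orderable on the GPU, now including 128-bit decimal. A hedged usage sketch (the config key is Spark's SQLConf.SORT_BEFORE_REPARTITION, spark.sql.execution.sortBeforeRepartition, and true is the default):

```scala
import org.apache.spark.sql.SparkSession

object RoundRobinDemo extends App {
  val spark = SparkSession.builder().master("local[1]").appName("rr-demo").getOrCreate()
  import spark.implicits._

  // Spark's default; rows are sorted before round-robin distribution so that
  // retried tasks produce the same partitioning.
  spark.conf.set("spark.sql.execution.sortBeforeRepartition", "true")

  val df = Seq((1, BigDecimal("1.10")), (2, BigDecimal("2.20"))).toDF("id", "amount")
  // repartition(n) without columns uses RoundRobinPartitioning, the case the
  // GpuShuffleMeta check above guards: INT and DECIMAL must both be orderable.
  println(df.repartition(4).count())

  spark.stop()
}
```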