
Commit

Make testExpectedExceptionStartsWith more flexible (NVIDIA#1106)
Signed-off-by: Andy Grove <andygrove@nvidia.com>
andygrove authored Nov 12, 2020
1 parent 98fb783 commit 6096848
Showing 4 changed files with 13 additions and 17 deletions.
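
In short, the helper drops its Class-plus-message-prefix parameters in favor of a single predicate over the caught exception, and is renamed from testExpectedExceptionStartsWith to testExpectedException; the three call sites in the diffs below are updated to match.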
CsvScanSuite.scala
@@ -20,9 +20,8 @@ import org.apache.spark.SparkConf
 import org.apache.spark.sql.functions.col

 class CsvScanSuite extends SparkQueryCompareTestSuite {
-  testExpectedExceptionStartsWith("Test CSV projection including unsupported types",
-    classOf[IllegalArgumentException],
-    "Part of the plan is not columnar",
+  testExpectedException[IllegalArgumentException]("Test CSV projection including unsupported types",
+    _.getMessage.startsWith("Part of the plan is not columnar"),
     mixedTypesFromCsvWithHeader) {
     frame => frame.select(col("c_string"), col("c_int"), col("c_timestamp"))
   }
HashAggregatesSuite.scala
@@ -283,10 +283,9 @@ class HashAggregatesSuite extends SparkQueryCompareTestSuite {
     frame => frame.agg(avg(lit("abc")),avg(lit("pqr")))
   }

-  testExpectedExceptionStartsWith(
+  testExpectedException[AnalysisException](
     "avg literals bools fail",
-    classOf[AnalysisException],
-    "cannot resolve",
+    _.getMessage.startsWith("cannot resolve"),
     longsFromCSVDf,
     conf = floatAggConf) {
     frame => frame.agg(avg(lit(true)),avg(lit(false)))
@@ -1550,10 +1549,10 @@ class HashAggregatesSuite extends SparkQueryCompareTestSuite {

   if (spark.SPARK_VERSION_SHORT < "3.1.0") {
     // A test that verifies that Distinct with Filter is not supported on the CPU or the GPU.
-    testExpectedExceptionStartsWith(
+    testExpectedException[AnalysisException](
       "Avg Distinct with filter - unsupported on CPU and GPU",
-      classOf[AnalysisException],
-      "DISTINCT and FILTER cannot be used in aggregate functions at the same time",
+      _.getMessage.startsWith(
+        "DISTINCT and FILTER cannot be used in aggregate functions at the same time"),
       longsFromCSVDf, conf = floatAggConf) {
       frame => frame.selectExpr("avg(distinct longs) filter (where longs < 5)")
     }
ParquetWriterSuite.scala
@@ -76,10 +76,9 @@ class ParquetWriterSuite extends SparkQueryCompareTestSuite {
     }
   }

-  testExpectedExceptionStartsWith(
+  testExpectedException[IllegalArgumentException](
     "int96 timestamps not supported",
-    classOf[IllegalArgumentException],
-    "Part of the plan is not columnar",
+    _.getMessage.startsWith("Part of the plan is not columnar"),
     frameFromParquet("timestamp-date-test-msec.parquet"),
     new SparkConf().set("spark.sql.parquet.outputTimestampType", "INT96")) {
     val tempFile = File.createTempFile("int96", "parquet")
SparkQueryCompareTestSuite.scala
@@ -793,10 +793,9 @@ trait SparkQueryCompareTestSuite extends FunSuite with Arm {
     }
   }

-  def testExpectedExceptionStartsWith[T <: Throwable](
+  def testExpectedException[T <: Throwable](
       testName: String,
-      exceptionClass: Class[T],
-      expectedException: String,
+      expectedException: T => Boolean,
       df: SparkSession => DataFrame,
       conf: SparkConf = new SparkConf(),
       repart: Integer = 1,
@@ -819,8 +818,8 @@ trait SparkQueryCompareTestSuite extends FunSuite with Arm {
       compareResults(sort, maxFloatDiff, fromCpu, fromGpu)
     })
     t match {
-      case Failure(e) if e.getClass == exceptionClass => {
-        assert(e.getMessage != null && e.getMessage.startsWith(expectedException))
+      case Failure(e) if e.isInstanceOf[T] => {
+        assert(expectedException(e.asInstanceOf[T]))
       }
       case Failure(e) => throw e
       case _ => fail("Expected an exception")
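
Because the helper now receives a plain T => Boolean, call sites are no longer limited to a startsWith check on the exception message. A minimal usage sketch, illustrative only and not part of this commit: it assumes a test class that mixes in SparkQueryCompareTestSuite and reuses the longsFromCSVDf and floatAggConf helpers seen above; the suite name, test name, and column name are hypothetical.

import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.functions.avg

class PredicateExampleSuite extends SparkQueryCompareTestSuite {
  // The predicate can be any check on the exception, e.g. contains() instead
  // of startsWith(), or inspecting fields other than the message.
  testExpectedException[AnalysisException](
    "avg of a missing column fails",
    e => e.getMessage != null && e.getMessage.contains("cannot resolve"),
    longsFromCSVDf,
    conf = floatAggConf) {
    frame => frame.agg(avg("no_such_column"))
  }
}

Passing a function keeps the helper agnostic to how a particular exception should be validated, which is what the commit title means by "more flexible".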
