diff --git a/tests/src/test/scala/org/apache/spark/sql/rapids/ParquetFormatScanSuite.scala b/tests/src/test/scala/org/apache/spark/sql/rapids/ParquetFormatScanSuite.scala
index 6e233d42bf7..e974c145347 100644
--- a/tests/src/test/scala/org/apache/spark/sql/rapids/ParquetFormatScanSuite.scala
+++ b/tests/src/test/scala/org/apache/spark/sql/rapids/ParquetFormatScanSuite.scala
@@ -23,8 +23,6 @@ import java.time.LocalDateTime
 import scala.collection.JavaConverters.mapAsJavaMapConverter
 import scala.concurrent.duration._
 
-import ai.rapids.cudf
-import com.nvidia.spark.rapids.Arm.withResource
 import com.nvidia.spark.rapids.SparkQueryCompareTestSuite
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.Path
@@ -46,8 +44,6 @@ import org.apache.spark.util.Utils
  * A lot of this testing code is based off of similar Spark tests.
  */
 class ParquetFormatScanSuite extends SparkQueryCompareTestSuite with Eventually {
-  private val debugPrinter = cudf.TableDebug.get();
-
   implicit class RecordConsumerDSL(consumer: RecordConsumer) {
     def message(f: => Unit): Unit = {
       consumer.startMessage()
@@ -1131,10 +1127,6 @@ class ParquetFormatScanSuite extends SparkQueryCompareTestSuite with Eventually
         }
       })
 
-      withResource(cudf.Table.readParquet(new File(testPath))) { table =>
-        debugPrinter.debug("DIRECT READ", table)
-      }
-
       val data = spark.read.parquet(testPath).collect()
       sameRows(Seq(Row(Array(0, 1), Array("TEST"))), data)
     }
@@ -1167,10 +1159,6 @@ class ParquetFormatScanSuite extends SparkQueryCompareTestSuite with Eventually
         }
       })
 
-      withResource(cudf.Table.readParquet(new File(testPath))) { table =>
-        debugPrinter.debug("DIRECT READ", table)
-      }
-
       val data = spark.read.parquet(testPath).collect()
       sameRows(Seq(Row(Array(0, 1))), data)
     }
@@ -1210,10 +1198,6 @@ class ParquetFormatScanSuite extends SparkQueryCompareTestSuite with Eventually
         }
       })
 
-      withResource(cudf.Table.readParquet(new File(testPath))) { table =>
-        debugPrinter.debug("DIRECT READ", table)
-      }
-
       val data = spark.read.parquet(testPath).collect()
       sameRows(Seq(Row(Array(Row("TEST", 0), Row("DATA", 1)))), data)
     }
@@ -1252,10 +1236,6 @@ class ParquetFormatScanSuite extends SparkQueryCompareTestSuite with Eventually
         }
       })
 
-      withResource(cudf.Table.readParquet(new File(testPath))) { table =>
-        debugPrinter.debug("DIRECT READ", table)
-      }
-
       val data = spark.read.parquet(testPath).collect()
       sameRows(Seq(Row(Array(Row(0), Row(1)))), data)
     }
@@ -1294,10 +1274,6 @@ class ParquetFormatScanSuite extends SparkQueryCompareTestSuite with Eventually
         }
       })
 
-      withResource(cudf.Table.readParquet(new File(testPath))) { table =>
-        debugPrinter.debug("DIRECT READ", table)
-      }
-
       val data = spark.read.parquet(testPath).collect()
       sameRows(Seq(Row(Array(Row(0), Row(1)))), data)
     }
@@ -1435,10 +1411,6 @@ class ParquetFormatScanSuite extends SparkQueryCompareTestSuite with Eventually
         }
       })
 
-      withResource(cudf.Table.readParquet(new File(testPath))) { table =>
-        debugPrinter.debug("DIRECT READ", table)
-      }
-
       val data = spark.read.parquet(testPath).collect()
       sameRows(Seq(Row(Map(0 -> 2, 1 -> 3))), data)
     }
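
For context, each deleted hunk above is the same debug pattern: after writing a Parquet file with writeDirect, the test read it back directly through the cudf Java API (bypassing Spark's reader) and dumped the decoded table to stdout. A minimal self-contained sketch of that pattern, built only from the calls visible in the removed code (cudf.Table.readParquet, cudf.TableDebug, Arm.withResource); the ParquetDebugUtil object and dump method are hypothetical names, since the tests inlined this logic:

import java.io.File

import ai.rapids.cudf
import com.nvidia.spark.rapids.Arm.withResource

// Hypothetical helper; the removed tests inlined this pattern instead.
object ParquetDebugUtil {
  // TableDebug prints a cudf Table's schema and contents to stdout.
  private val debugPrinter = cudf.TableDebug.get()

  def dump(testPath: String): Unit = {
    // Table is AutoCloseable and owns GPU memory; withResource guarantees
    // it is closed even if the debug print throws.
    withResource(cudf.Table.readParquet(new File(testPath))) { table =>
      debugPrinter.debug("DIRECT READ", table)
    }
  }
}

Because the removed blocks ran unconditionally on every test execution, they added GPU reads and console noise to normal CI runs, which is why they are stripped here rather than kept behind a flag.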