diff --git a/integration_tests/src/main/python/orc_write_test.py b/integration_tests/src/main/python/orc_write_test.py
index 4633315202a..93a8a948426 100644
--- a/integration_tests/src/main/python/orc_write_test.py
+++ b/integration_tests/src/main/python/orc_write_test.py
@@ -109,6 +109,7 @@ def test_orc_write_compression_fallback(spark_tmp_path, codec, spark_tmp_table_f
             'DataWritingCommandExec',
             conf=all_confs)
 
+@ignore_order
 @allow_non_gpu('DataWritingCommandExec')
 def test_buckets_write_fallback(spark_tmp_path, spark_tmp_table_factory):
     data_path = spark_tmp_path + '/ORC_DATA'
diff --git a/integration_tests/src/main/python/parquet_write_test.py b/integration_tests/src/main/python/parquet_write_test.py
index a3014dc31af..9623a978b9b 100644
--- a/integration_tests/src/main/python/parquet_write_test.py
+++ b/integration_tests/src/main/python/parquet_write_test.py
@@ -229,6 +229,7 @@ def test_parquet_writeLegacyFormat_fallback(spark_tmp_path, spark_tmp_table_fact
             'DataWritingCommandExec',
             conf=all_confs)
 
+@ignore_order
 @allow_non_gpu('DataWritingCommandExec')
 def test_buckets_write_fallback(spark_tmp_path, spark_tmp_table_factory):
     data_path = spark_tmp_path + '/PARQUET_DATA'
diff --git a/sql-plugin/src/main/scala/com/nvidia/spark/RebaseHelper.scala b/sql-plugin/src/main/scala/com/nvidia/spark/RebaseHelper.scala
index 27c9c85e249..858fc74da2c 100644
--- a/sql-plugin/src/main/scala/com/nvidia/spark/RebaseHelper.scala
+++ b/sql-plugin/src/main/scala/com/nvidia/spark/RebaseHelper.scala
@@ -39,7 +39,9 @@ object RebaseHelper extends Arm {
         }
       }
     } else if (dtype.isTimestampType) {
-      assert(dtype == DType.TIMESTAMP_MICROSECONDS)
+      // TODO - https://github.com/NVIDIA/spark-rapids/issues/1130 to properly handle
+      // TIMESTAMP_MILLIS, for now use require so we fail if that happens
+      require(dtype == DType.TIMESTAMP_MICROSECONDS)
       withResource(
         Scalar.timestampFromLong(DType.TIMESTAMP_MICROSECONDS, startTs)) { minGood =>
         withResource(column.lessThan(minGood)) { hasBad =>