From a6193b14150d3d39eec7e94093850842c5a459d2 Mon Sep 17 00:00:00 2001 From: Andy Grove Date: Fri, 8 Apr 2022 14:12:23 -0600 Subject: [PATCH 1/2] Temporarily ignore tests that fail with Spark 3.3 Signed-off-by: Andy Grove --- integration_tests/src/main/python/arithmetic_ops_test.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/integration_tests/src/main/python/arithmetic_ops_test.py b/integration_tests/src/main/python/arithmetic_ops_test.py index e1d4f8c3c24..a674f1b2cfb 100644 --- a/integration_tests/src/main/python/arithmetic_ops_test.py +++ b/integration_tests/src/main/python/arithmetic_ops_test.py @@ -19,7 +19,8 @@ from marks import ignore_order, incompat, approximate_float, allow_non_gpu from pyspark.sql.types import * from pyspark.sql.types import IntegralType -from spark_session import with_cpu_session, with_gpu_session, with_spark_session, is_before_spark_320, is_before_spark_330, is_databricks91_or_later +from spark_session import with_cpu_session, with_gpu_session, with_spark_session, is_before_spark_320, \ + is_before_spark_330, is_databricks91_or_later, is_spark_330_or_later import pyspark.sql.functions as f from datetime import timedelta @@ -780,6 +781,7 @@ def _get_div_overflow_df(spark, expr): # Only run this test for Spark v3.2.0 and later to verify IntegralDivide will # throw exceptions for overflow when ANSI mode is enabled. 
@pytest.mark.skipif(is_before_spark_320(), reason='https://github.com/apache/spark/pull/32260') +@pytest.mark.skipif(is_spark_330_or_later(), reason='https://github.com/NVIDIA/spark-rapids/issues/5182') @pytest.mark.parametrize('expr', div_overflow_exprs) @pytest.mark.parametrize('ansi_enabled', ['false', 'true']) def test_div_overflow_exception_when_ansi(expr, ansi_enabled): From 1ef2cff1b0d37387c72e4716aba8d1d5ba720a7a Mon Sep 17 00:00:00 2001 From: Andy Grove Date: Fri, 8 Apr 2022 14:31:04 -0600 Subject: [PATCH 2/2] Relax expected exception message instead of skipping test on Spark 3.3 --- integration_tests/src/main/python/arithmetic_ops_test.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/integration_tests/src/main/python/arithmetic_ops_test.py b/integration_tests/src/main/python/arithmetic_ops_test.py index a674f1b2cfb..2c84b689571 100644 --- a/integration_tests/src/main/python/arithmetic_ops_test.py +++ b/integration_tests/src/main/python/arithmetic_ops_test.py @@ -19,8 +19,7 @@ from marks import ignore_order, incompat, approximate_float, allow_non_gpu from pyspark.sql.types import * from pyspark.sql.types import IntegralType -from spark_session import with_cpu_session, with_gpu_session, with_spark_session, is_before_spark_320, \ - is_before_spark_330, is_databricks91_or_later, is_spark_330_or_later +from spark_session import with_cpu_session, with_gpu_session, with_spark_session, is_before_spark_320, is_before_spark_330, is_databricks91_or_later import pyspark.sql.functions as f from datetime import timedelta @@ -781,7 +780,6 @@ def _get_div_overflow_df(spark, expr): # Only run this test for Spark v3.2.0 and later to verify IntegralDivide will # throw exceptions for overflow when ANSI mode is enabled. 
@pytest.mark.skipif(is_before_spark_320(), reason='https://github.com/apache/spark/pull/32260') -@pytest.mark.skipif(is_spark_330_or_later(), reason='https://github.com/NVIDIA/spark-rapids/issues/5182') @pytest.mark.parametrize('expr', div_overflow_exprs) @pytest.mark.parametrize('ansi_enabled', ['false', 'true']) def test_div_overflow_exception_when_ansi(expr, ansi_enabled): @@ -790,7 +788,7 @@ def test_div_overflow_exception_when_ansi(expr, ansi_enabled): assert_gpu_and_cpu_error( df_fun=lambda spark: _get_div_overflow_df(spark, expr).collect(), conf=ansi_conf, - error_message='java.lang.ArithmeticException: Overflow in integral divide') + error_message='ArithmeticException: Overflow in integral divide') else: assert_gpu_and_cpu_are_equal_collect( func=lambda spark: _get_div_overflow_df(spark, expr),