XFAIL some tests with Spark 3.3.0 with LEGACY timeParserPolicy
andygrove committed Mar 8, 2022
1 parent e0b4a44 commit f0599ab
Showing 2 changed files with 8 additions and 3 deletions.
integration_tests/src/main/python/json_test.py (8 changes: 5 additions & 3 deletions)
@@ -19,7 +19,7 @@
 from conftest import is_databricks_runtime
 from marks import approximate_float, allow_non_gpu, ignore_order
 
-from spark_session import with_cpu_session, with_gpu_session, is_before_spark_330
+from spark_session import with_cpu_session, with_gpu_session, is_before_spark_330, is_spark_330_or_later
 
 json_supported_gens = [
     # Spark does not escape '\r' or '\n' even though it uses it to mark end of record
@@ -236,7 +236,8 @@ def test_basic_json_read(std_input_path, filename, schema, read_func, allow_non_
 @pytest.mark.parametrize('read_func', [read_json_df, read_json_sql])
 @pytest.mark.parametrize('ansi_enabled', ["true", "false"])
 @pytest.mark.parametrize('time_parser_policy', [
-    pytest.param('LEGACY', marks=pytest.mark.allow_non_gpu('FileSourceScanExec')),
+    pytest.param('LEGACY', marks=[pytest.mark.allow_non_gpu('FileSourceScanExec'), \
+        pytest.mark.xfail(not is_before_spark_330(), reason="https://github.com/NVIDIA/spark-rapids/issues/4912")]),
     'CORRECTED',
     'EXCEPTION'
 ])
@@ -262,7 +263,8 @@ def test_json_read_valid_dates(std_input_path, filename, schema, read_func, ansi
 @pytest.mark.parametrize('read_func', [read_json_df, read_json_sql])
 @pytest.mark.parametrize('ansi_enabled', ["true", "false"])
 @pytest.mark.parametrize('time_parser_policy', [
-    pytest.param('LEGACY', marks=pytest.mark.allow_non_gpu('FileSourceScanExec')),
+    pytest.param('LEGACY', marks=[pytest.mark.allow_non_gpu('FileSourceScanExec'), \
+        pytest.mark.xfail(is_spark_330_or_later(), reason="https://github.com/NVIDIA/spark-rapids/issues/4912")]),
     'CORRECTED',
     'EXCEPTION'
 ])
3 changes: 3 additions & 0 deletions integration_tests/src/main/python/spark_session.py
@@ -117,6 +117,9 @@ def is_before_spark_320():
 def is_before_spark_330():
     return spark_version() < "3.3.0"
 
+def is_spark_330_or_later():
+    return spark_version() >= "3.3.0"
+
 def is_databricks91_or_later():
     spark = get_spark_i_know_what_i_am_doing()
     return spark.conf.get("spark.databricks.clusterUsageTags.sparkVersion", "") >= "9.1"
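For context (not part of the commit): below is a minimal, self-contained sketch of how a conditional pytest.mark.xfail on a pytest.param behaves, mirroring the pattern used in the json_test.py hunks above. The spark_version() stub and the test name here are hypothetical placeholders, not the real helpers from spark_session.py.

import pytest

def spark_version():
    # Hypothetical stub standing in for the real spark_session.spark_version();
    # pretend the cluster reports Spark 3.3.0.
    return "3.3.0"

def is_spark_330_or_later():
    return spark_version() >= "3.3.0"

@pytest.mark.parametrize('time_parser_policy', [
    # When is_spark_330_or_later() is True and the LEGACY case fails,
    # pytest reports it as XFAIL (expected failure) instead of a hard failure.
    pytest.param('LEGACY', marks=pytest.mark.xfail(is_spark_330_or_later(),
                                                   reason='illustrative only')),
    'CORRECTED',
    'EXCEPTION',
])
def test_time_parser_policy(time_parser_policy):
    # Stand-in assertion for the real JSON date-parsing check.
    assert time_parser_policy != 'LEGACY'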
