diff --git a/integration_tests/src/main/python/collection_ops_test.py b/integration_tests/src/main/python/collection_ops_test.py
index e9664e9c9e8..42824bd12fe 100644
--- a/integration_tests/src/main/python/collection_ops_test.py
+++ b/integration_tests/src/main/python/collection_ops_test.py
@@ -20,8 +20,7 @@ from string_test import mk_str_gen
 import pyspark.sql.functions as f
 import pyspark.sql.utils
-from spark_session import with_cpu_session, with_gpu_session, is_before_spark_334, is_before_spark_351, is_before_spark_342, is_before_spark_340, is_spark_350
-from spark_init_internal import spark_version
+from spark_session import with_cpu_session, with_gpu_session, is_before_spark_351
 from conftest import get_datagen_seed
 from marks import allow_non_gpu
 
@@ -327,8 +326,7 @@ def test_sequence_illegal_boundaries(start_gen, stop_gen, step_gen):
 @pytest.mark.parametrize('stop_gen', sequence_too_long_length_gens, ids=idfn)
 @allow_non_gpu(*non_utc_allow)
 def test_sequence_too_long_sequence(stop_gen):
-    msg = "Too long sequence" if is_before_spark_334() or (not is_before_spark_340() and is_before_spark_342()) \
-        or is_spark_350() else "Unsuccessful try to create array with"
+    msg = "Too long sequence" if is_before_spark_351() else "Unsuccessful try to create array with"
     assert_gpu_and_cpu_error(
         # To avoid OOM, reduce the row number to 1, it is enough to verify this case.
         lambda spark:unary_op_df(spark, stop_gen, 1).selectExpr(
diff --git a/integration_tests/src/main/python/spark_session.py b/integration_tests/src/main/python/spark_session.py
index 8871551d6ed..bcb1c526dbb 100644
--- a/integration_tests/src/main/python/spark_session.py
+++ b/integration_tests/src/main/python/spark_session.py
@@ -187,27 +187,18 @@ def is_before_spark_330():
 def is_before_spark_331():
     return spark_version() < "3.3.1"
 
-def is_before_spark_334():
-    return spark_version() < "3.3.4"
-
 def is_before_spark_340():
     return spark_version() < "3.4.0"
 
 def is_before_spark_341():
     return spark_version() < "3.4.1"
 
-def is_before_spark_342():
-    return spark_version() < "3.4.2"
-
 def is_before_spark_350():
     return spark_version() < "3.5.0"
 
 def is_before_spark_351():
     return spark_version() < "3.5.1"
 
-def is_before_spark_400():
-    return spark_version() < "4.0.0"
-
 def is_spark_320_or_later():
     return spark_version() >= "3.2.0"
 
@@ -226,9 +217,6 @@ def is_spark_350_or_later():
 def is_spark_330():
     return spark_version() == "3.3.0"
 
-def is_spark_350():
-    return spark_version() == "3.5.0"
-
 def is_spark_33X():
     return "3.3.0" <= spark_version() < "3.4.0"