Skip to content

Commit

Permalink
Clean up unused is_before_spark_310 (NVIDIA#1706)
Browse files — browse the repository at this point in the history
Signed-off-by: Thomas Graves <tgraves@nvidia.com>
  • Loading branch information
tgravescs authored Feb 10, 2021
1 parent 8e51e4c commit 932ca6d
Show file tree
Hide file tree
Showing 6 changed files with 9 additions and 10 deletions.
4 changes: 2 additions & 2 deletions integration_tests/src/main/python/cache_test.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
# Copyright (c) 2020, NVIDIA CORPORATION.
# Copyright (c) 2020-2021, NVIDIA CORPORATION.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
Expand All @@ -18,7 +18,7 @@
from data_gen import *
from datetime import date
import pyspark.sql.functions as f
from spark_session import with_cpu_session, with_gpu_session, is_spark_300, is_before_spark_310
from spark_session import with_cpu_session, with_gpu_session, is_spark_300
from join_test import create_df
from generate_expr_test import four_op_df
from marks import incompat, allow_non_gpu, ignore_order
Expand Down
4 changes: 2 additions & 2 deletions integration_tests/src/main/python/date_time_test.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
# Copyright (c) 2020, NVIDIA CORPORATION.
# Copyright (c) 2020-2021, NVIDIA CORPORATION.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
Expand All @@ -18,7 +18,7 @@
from datetime import date, datetime, timezone
from marks import incompat
from pyspark.sql.types import *
from spark_session import with_spark_session, is_before_spark_310
from spark_session import with_spark_session
import pyspark.sql.functions as f

# We only support literal intervals for TimeSub
Expand Down
3 changes: 1 addition & 2 deletions integration_tests/src/main/python/generate_expr_test.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
# Copyright (c) 2020, NVIDIA CORPORATION.
# Copyright (c) 2020-2021, NVIDIA CORPORATION.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
Expand All @@ -14,7 +14,6 @@

import pytest

from spark_session import is_before_spark_310
from asserts import assert_gpu_and_cpu_are_equal_collect
from data_gen import *
from marks import ignore_order
Expand Down
2 changes: 1 addition & 1 deletion integration_tests/src/main/python/parquet_write_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@
from data_gen import *
from marks import *
from pyspark.sql.types import *
from spark_session import with_cpu_session, with_gpu_session, is_before_spark_310
from spark_session import with_cpu_session, with_gpu_session
import random

# test with original parquet file reader, the multi-file parallel reader for cloud, and coalesce file reader for
Expand Down
4 changes: 2 additions & 2 deletions integration_tests/src/main/python/qa_nightly_sql.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
# Copyright (c) 2020, NVIDIA CORPORATION.
# Copyright (c) 2020-2021, NVIDIA CORPORATION.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
Expand All @@ -13,7 +13,7 @@
# limitations under the License.

from conftest import is_databricks_runtime
from spark_session import with_spark_session, is_before_spark_310
from spark_session import with_spark_session
import pytest

SELECT_SQL = [
Expand Down
2 changes: 1 addition & 1 deletion integration_tests/src/main/python/tpch_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
from asserts import assert_gpu_and_cpu_are_equal_collect
from conftest import is_databricks_runtime
from marks import approximate_float, incompat, ignore_order, allow_non_gpu, allow_non_gpu_databricks
from spark_session import with_spark_session, is_before_spark_310
from spark_session import with_spark_session

_base_conf = {'spark.rapids.sql.variableFloatAgg.enabled': 'true',
'spark.rapids.sql.hasNans': 'false',
Expand Down

0 comments on commit 932ca6d

Please sign in to comment.