From b422d5048d992019b9b1c6e310b4b07136b17879 Mon Sep 17 00:00:00 2001
From: Peixin
Date: Thu, 21 Jul 2022 13:11:58 +0800
Subject: [PATCH] Update spark322shim dependency to released lib (#6031)

* Update spark322shim dependency to released one

Signed-off-by: Peixin Li

* address comment

* also update minimumFeatureVersionMix doc
---
 CONTRIBUTING.md                                          | 4 ++--
 build/buildall                                           | 2 ++
 dist/pom.xml                                             | 2 +-
 docs/additional-functionality/rapids-shuffle.md          | 1 +
 jenkins/spark-premerge-build.sh                          | 2 +-
 pom.xml                                                  | 2 +-
 .../rapids/shims/spark322/SparkShimServiceProvider.scala | 2 +-
 7 files changed, 9 insertions(+), 6 deletions(-)

diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 571d51acfb6..150d9e56f39 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -86,8 +86,8 @@ There is a build script `build/buildall` that automates the local build process
 `./buid/buildall --help` for up-to-date use information.
 
 By default, it builds everything that is needed to create a distribution jar for all released (noSnapshots) Spark versions except for Databricks. Other profiles that you can pass using `--profile=` include
-- `snapshots`
-- `minimumFeatureVersionMix` that currently includes 321cdh, 312, 320 is recommended for catching incompatibilities already in the local development cycle
+- `snapshots` that includes all released (noSnapshots) and snapshots Spark versions except for Databricks
+- `minimumFeatureVersionMix` that currently includes 321cdh, 312, 320, 330 is recommended for catching incompatibilities already in the local development cycle
 
 For initial quick iterations we can use `--profile=` to build a single-shim version. e.g., `--profile=311`
 for Spark 3.1.1.
diff --git a/build/buildall b/build/buildall
index d0b90a4ff9b..95cd0b59a72 100755
--- a/build/buildall
+++ b/build/buildall
@@ -159,6 +159,7 @@ case $DIST_PROFILE in
       320
       321
       322
+      330
       331
     )
     ;;
@@ -171,6 +172,7 @@
       313
       320
       321
+      322
       330
     )
     ;;
diff --git a/dist/pom.xml b/dist/pom.xml
index d8e364d76db..597c4c82d5b 100644
--- a/dist/pom.xml
+++ b/dist/pom.xml
@@ -47,11 +47,11 @@
             320,
             321,
             321cdh,
+            322,
             330
         </noSnapshot.buildvers>
         <snapshot.buildvers>
             314,
-            322,
             331
         </snapshot.buildvers>
diff --git a/docs/additional-functionality/rapids-shuffle.md b/docs/additional-functionality/rapids-shuffle.md
index d667f251ef8..1c06d4c991d 100644
--- a/docs/additional-functionality/rapids-shuffle.md
+++ b/docs/additional-functionality/rapids-shuffle.md
@@ -286,6 +286,7 @@ In this section, we are using a docker container built using the sample dockerfi
 | 3.2.0           | com.nvidia.spark.rapids.spark320.RapidsShuffleManager    |
 | 3.2.1           | com.nvidia.spark.rapids.spark321.RapidsShuffleManager    |
 | 3.2.1 CDH       | com.nvidia.spark.rapids.spark321cdh.RapidsShuffleManager |
+| 3.2.2           | com.nvidia.spark.rapids.spark322.RapidsShuffleManager    |
 | 3.3.0           | com.nvidia.spark.rapids.spark330.RapidsShuffleManager    |
 | Databricks 9.1  | com.nvidia.spark.rapids.spark312db.RapidsShuffleManager  |
 | Databricks 10.4 | com.nvidia.spark.rapids.spark321db.RapidsShuffleManager  |
diff --git a/jenkins/spark-premerge-build.sh b/jenkins/spark-premerge-build.sh
index 40ae460e1b9..775f5385263 100755
--- a/jenkins/spark-premerge-build.sh
+++ b/jenkins/spark-premerge-build.sh
@@ -50,7 +50,7 @@ mvn_verify() {
     # enable UTF-8 for regular expression tests
     env -u SPARK_HOME LC_ALL="en_US.UTF-8" mvn $MVN_URM_MIRROR -Dbuildver=320 test -Drat.skip=true -Dmaven.javadoc.skip=true -Dskip -Dmaven.scalastyle.skip=true -Dcuda.version=$CUDA_CLASSIFIER -Dpytest.TEST_TAGS='' -pl '!tools' \
        -DwildcardSuites=com.nvidia.spark.rapids.ConditionalsSuite,com.nvidia.spark.rapids.RegularExpressionSuite,com.nvidia.spark.rapids.RegularExpressionTranspilerSuite
     env -u SPARK_HOME mvn -U -B $MVN_URM_MIRROR -Dbuildver=321 clean install -Drat.skip=true -DskipTests -Dmaven.javadoc.skip=true -Dskip -Dmaven.scalastyle.skip=true -Dcuda.version=$CUDA_CLASSIFIER -pl aggregator -am
-    [[ $BUILD_MAINTENANCE_VERSION_SNAPSHOTS == "true" ]] && env -u SPARK_HOME mvn -U -B $MVN_URM_MIRROR -Dbuildver=322 clean install -Drat.skip=true -DskipTests -Dmaven.javadoc.skip=true -Dskip -Dmaven.scalastyle.skip=true -Dcuda.version=$CUDA_CLASSIFIER -pl aggregator -am
+    env -u SPARK_HOME mvn -U -B $MVN_URM_MIRROR -Dbuildver=322 clean install -Drat.skip=true -DskipTests -Dmaven.javadoc.skip=true -Dskip -Dmaven.scalastyle.skip=true -Dcuda.version=$CUDA_CLASSIFIER -pl aggregator -am
     env -u SPARK_HOME mvn -U -B $MVN_URM_MIRROR -Dbuildver=330 clean install -Drat.skip=true -DskipTests -Dmaven.javadoc.skip=true -Dskip -Dmaven.scalastyle.skip=true -Dcuda.version=$CUDA_CLASSIFIER -pl aggregator -am
     [[ $BUILD_MAINTENANCE_VERSION_SNAPSHOTS == "true" ]] && env -u SPARK_HOME mvn -U -B $MVN_URM_MIRROR -Dbuildver=331 clean install -Drat.skip=true -DskipTests -Dmaven.javadoc.skip=true -Dskip -Dmaven.scalastyle.skip=true -Dcuda.version=$CUDA_CLASSIFIER -pl aggregator -am
     [[ $BUILD_FEATURE_VERSION_SNAPSHOTS == "true" ]] && env -u SPARK_HOME mvn -U -B $MVN_URM_MIRROR -Dbuildver=340 clean install -Drat.skip=true -DskipTests -Dmaven.javadoc.skip=true -Dskip -Dmaven.scalastyle.skip=true -Dcuda.version=$CUDA_CLASSIFIER -pl aggregator -am
diff --git a/pom.xml b/pom.xml
index 75be178baa8..0494b723bdf 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1008,7 +1008,7 @@
         <spark321.version>3.2.1</spark321.version>
         <spark321cdh.version>3.2.1.3.2.7171000.0-3</spark321cdh.version>
         <spark321db.version>3.2.1-databricks</spark321db.version>
-        <spark322.version>3.2.2-SNAPSHOT</spark322.version>
+        <spark322.version>3.2.2</spark322.version>
         <spark330.version>3.3.0</spark330.version>
         <spark331.version>3.3.1-SNAPSHOT</spark331.version>
         <spark340.version>3.4.0-SNAPSHOT</spark340.version>
diff --git a/sql-plugin/src/main/322/scala/com/nvidia/spark/rapids/shims/spark322/SparkShimServiceProvider.scala b/sql-plugin/src/main/322/scala/com/nvidia/spark/rapids/shims/spark322/SparkShimServiceProvider.scala
index 66d962b0fc6..1df0b81e6b7 100644
--- a/sql-plugin/src/main/322/scala/com/nvidia/spark/rapids/shims/spark322/SparkShimServiceProvider.scala
+++ b/sql-plugin/src/main/322/scala/com/nvidia/spark/rapids/shims/spark322/SparkShimServiceProvider.scala
@@ -20,7 +20,7 @@ import com.nvidia.spark.rapids.SparkShimVersion
 
 object SparkShimServiceProvider {
   val VERSION = SparkShimVersion(3, 2, 2)
-  val VERSIONNAMES = Seq(s"$VERSION", s"$VERSION-SNAPSHOT")
+  val VERSIONNAMES = Seq(s"$VERSION")
 }
 
 class SparkShimServiceProvider extends com.nvidia.spark.rapids.SparkShimServiceProvider {
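
The last hunk above removes the `-SNAPSHOT` alias from the spark322 shim's accepted version names, so after this change the shim only claims a runtime that reports exactly the released version string `3.2.2`. Below is a minimal, self-contained sketch of that matching behavior; the standalone object, the local `SparkShimVersion` case class, and the `matchesVersion` helper are illustrative stand-ins, not the plugin's actual classes, which this patch does not show.

```scala
// Illustrative sketch only: models how VERSIONNAMES gates shim selection
// after this patch. The real plugin resolves shims through its own
// SparkShimServiceProvider service loading; everything here is a stand-in.
object ShimVersionSketch {
  final case class SparkShimVersion(major: Int, minor: Int, patch: Int) {
    override def toString: String = s"$major.$minor.$patch"
  }

  val VERSION = SparkShimVersion(3, 2, 2)

  // After the patch: only the released version string, no "-SNAPSHOT" alias.
  val VERSIONNAMES: Seq[String] = Seq(s"$VERSION")

  // Hypothetical stand-in for the provider's version check.
  def matchesVersion(sparkVersion: String): Boolean =
    VERSIONNAMES.contains(sparkVersion)

  def main(args: Array[String]): Unit = {
    assert(matchesVersion("3.2.2"))           // released build: accepted
    assert(!matchesVersion("3.2.2-SNAPSHOT")) // snapshot build: now rejected
    println("spark322 shim matches only the released 3.2.2")
  }
}
```

If this reading is right, a cluster running a 3.2.2-SNAPSHOT build would no longer select this shim, which is consistent with the move to the released lib in the patch title and with the premerge script now building `-Dbuildver=322` unconditionally rather than behind `BUILD_MAINTENANCE_VERSION_SNAPSHOTS`.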