Move tests in tests module to integration test
Signed-off-by: Chong Gao <res_life@163.com>
Chong Gao committed Dec 9, 2021
1 parent 61f51ff commit c426826
Showing 111 changed files with 170 additions and 390 deletions.
32 changes: 32 additions & 0 deletions dist/pom.xml
@@ -299,6 +299,38 @@
</target>
</configuration>
</execution>

<!-- Generate a new jar from the dist jar for compiling the integration tests module. -->
<!-- See the comments in the integration_tests module pom.xml. -->
<execution>
<id>generate-dist-compile-jar</id>
<phase>package</phase>
<configuration>
<target name="generate-dist-compile-jar">
<exec executable="${project.basedir}/scripts/generate-dist-compile-jar.sh"
dir="${project.build.directory}"
resultproperty="generate-dist-compile-jar.exitCode"
errorproperty="generate-dist-compile-jar.errorMsg"
failonerror="false">

<arg value="${artifactId}-${version}.jar"/>
<arg value="${spark.version.classifier}"/>
</exec>

<fail message="exec generate-dist-compile-jar.sh failed, exit code is ${generate-dist-compile-jar.exitCode}, error msg is ${generate-dist-compile-jar.errorMsg}">
<condition>
<not>
<equals arg1="${generate-dist-compile-jar.exitCode}" arg2="0"/>
</not>
</condition>
</fail>
</target>
</configuration>
<goals>
<goal>run</goal>
</goals>
</execution>

<execution>
<phase>verify</phase>
<goals>
40 changes: 40 additions & 0 deletions dist/scripts/generate-dist-compile-jar.sh
@@ -0,0 +1,40 @@
#!/usr/bin/env bash

# Copyright (c) 2021, NVIDIA CORPORATION.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

#
# Generate a jar for compiling the integration tests module,
# install it into the local Maven repository;
# the integration tests build will use it.
#
set -e

jar_name=$1                                  # e.g. rapids-4-spark_2.12-<version>.jar
classifier=$2                                # Spark shim classifier, e.g. spark301
target_path=$PWD
jar_path="${target_path}/${jar_name}"
tmp_path="${target_path}/tmp-dist-for-compile"
mkdir -p "${tmp_path}"
unzip -q "${jar_path}" -d "${tmp_path}"      # explode the dist jar
cd "${tmp_path}"
cp -r "${classifier}"/* ./                   # flatten the shim classes for this Spark version
cp -r spark3xx-common/* ./                   # flatten the common classes
rm -rf spark3*                               # drop the now-redundant per-shim directories
rm libjucx.so
zip -rq dist-for-compile-1.jar *             # repackage as a plain compile-time jar
mv dist-for-compile-1.jar /tmp/
cd ..
rm -rf "${tmp_path}"
mvn install:install-file -Dfile=/tmp/dist-for-compile-1.jar -DgroupId=com.nvidia -DartifactId=dist-for-compile -Dversion=1 -Dpackaging=jar
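
For reference, a manual run of the script from the dist build directory might look like the following; the jar name and classifier here are illustrative, not taken from this build:

  cd dist/target
  ../scripts/generate-dist-compile-jar.sh rapids-4-spark_2.12-21.12.0.jar spark301

The pom wires up the same call through the Ant exec task above, passing ${artifactId}-${version}.jar and ${spark.version.classifier} as the two arguments.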
123 changes: 98 additions & 25 deletions integration_tests/pom.xml
@@ -59,37 +59,46 @@
<scope>provided</scope>
</dependency>
<dependency>
<groupId>com.nvidia</groupId>
<artifactId>rapids-4-spark_${scala.binary.version}</artifactId>
<version>${project.version}</version>
<scope>provided</scope>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
<scope>test</scope>
</dependency>
<!-- Use the aggregator jar because the tests access internal classes. -->
<dependency>
<groupId>com.nvidia</groupId>
<artifactId>rapids-4-spark-aggregator_${scala.binary.version}</artifactId>
<version>${project.version}</version>
<classifier>${spark.version.classifier}</classifier>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-sql_${scala.binary.version}</artifactId>
<version>${spark.test.version}</version>
<scope>compile</scope>
</dependency>

<!-- Compile the test code against the v2 shims code, but run it solely against the distribution jar. -->
<!-- The scope is runtime, so it is not used at compile time. -->
<!-- run_pyspark_from_build.sh depends on the distribution jar, so keep it at runtime scope. -->
<dependency>
<groupId>com.nvidia</groupId>
<artifactId>rapids-4-spark-udf-examples_${scala.binary.version}</artifactId>
<artifactId>rapids-4-spark_2.12</artifactId>
<version>${project.version}</version>
<scope>test</scope>
<scope>runtime</scope>
</dependency>
<!-- This jar is generated for compiling the tests. -->
<!-- It is extracted from the dist jar -->
<!-- and installed into the local repo by the dist project. -->
<dependency>
<groupId>com.nvidia</groupId>
<artifactId>rapids-4-spark-tests_${scala.binary.version}</artifactId>
<version>${project.version}</version>
<classifier>${spark.version.classifier}tests</classifier>
<type>test-jar</type>
<scope>test</scope>
<artifactId>dist-for-compile</artifactId>
<version>1</version>
</dependency>

<!-- For the ScalaUDFSuite.scala test case. -->
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-sql_${scala.binary.version}</artifactId>
<version>${spark.test.version}</version>
<groupId>com.nvidia</groupId>
<artifactId>rapids-4-spark-udf-examples_${scala.binary.version}</artifactId>
<version>${project.version}</version>
<scope>test</scope>
</dependency>
</dependencies>

@@ -127,6 +136,22 @@
<groupId>org.apache.curator</groupId>
<artifactId>curator-recipes</artifactId>
<version>4.3.0.7.2.7.0-184</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-hive_${scala.binary.version}</artifactId>
<version>${spark311cdh.version}</version>
<exclusions>
<exclusion>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_${scala.binary.version}</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.arrow</groupId>
<artifactId>*</artifactId>
</exclusion>
</exclusions>
</dependency>
</dependencies>
</profile>
@@ -169,9 +194,39 @@
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-reflect</artifactId>
<version>${scala.version}</version>
<groupId>org.apache.spark</groupId>
<artifactId>spark-unsafe_${scala.binary.version}</artifactId>
<version>${spark.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-io</artifactId>
<version>${spark.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
<version>${spark.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>${spark.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client</artifactId>
<version>${spark.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>com.esotericsoftware.kryo</groupId>
<artifactId>kryo-shaded-db</artifactId>
<version>${spark.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
Expand All @@ -184,20 +239,38 @@
<groupId>org.apache.arrow</groupId>
<artifactId>arrow-memory</artifactId>
<version>${spark.version}</version>
<scope>compile</scope>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.arrow</groupId>
<artifactId>arrow-vector</artifactId>
<version>${spark.version}</version>
<scope>compile</scope>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
<version>${spark.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.parquet</groupId>
<artifactId>parquet-hadoop</artifactId>
<version>${spark.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.parquet</groupId>
<artifactId>parquet-column</artifactId>
<version>${spark.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-reflect</artifactId>
<version>${scala.version}</version>
<scope>provided</scope>
</dependency>
</dependencies>
</profile>
</profiles>
@@ -306,7 +379,7 @@
<executions>
<execution>
<id>run pyspark tests</id>
<phase>verify</phase><!--run after packageing and collecting dependencies-->
<phase>verify</phase><!--run after packaging and collecting dependencies-->
<goals>
<goal>exec</goal>
</goals>
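To sanity-check that the locally installed compile-only artifact resolves for this module, a command along these lines could be used (module name assumed from the repository layout):

  mvn -pl integration_tests dependency:tree -Dincludes=com.nvidia:dist-for-compile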
11 files renamed without changes.
1 change: 0 additions & 1 deletion pom.xml
@@ -76,7 +76,6 @@
<module>shims</module>
<module>shuffle-plugin</module>
<module>sql-plugin</module>
<module>tests</module>
<module>udf-compiler</module>
<module>udf-examples</module>
</modules>
52 changes: 0 additions & 52 deletions tests/README.md

This file was deleted.

